public Set<String> getResourceDomains() {
    Set<String> set = Sets.<String>newHashSet();
    File file1 = new File(this.resourcePackFile, "assets/");

    if (file1.isDirectory()) {
        for (File file2 : file1.listFiles((FileFilter) DirectoryFileFilter.DIRECTORY)) {
            String s = getRelativeName(file1, file2);

            if (!s.equals(s.toLowerCase())) {
                this.logNameNotLowercase(s);
            } else {
                set.add(s.substring(0, s.length() - 1));
            }
        }
    }

    return set;
}
private void removeUselessPages(ApplicationComponent application) {
    if (application != null) {
        File ionicPagesDir = new File(ionicWorkDir, "src/pages");
        List<String> pageDirectories = new ArrayList<String>();
        pageDirectories.add(ionicPagesDir.getAbsolutePath());
        List<PageComponent> pages = application.getPageComponentList();
        for (PageComponent page : pages) {
            File pageDir = new File(ionicPagesDir, page.getName());
            pageDirectories.add(pageDir.getAbsolutePath());
        }
        for (File dir : FileUtils.listFilesAndDirs(ionicPagesDir, FalseFileFilter.INSTANCE, DirectoryFileFilter.DIRECTORY)) {
            if (!pageDirectories.contains(dir.getAbsolutePath())) {
                try {
                    FileUtils.deleteDirectory(dir);
                } catch (Exception e) {
                    // ignore failures while deleting stale page directories
                }
            }
        }
    }
}
@NotNull
@Override
public Set<String> getRequiredExtensionNames() {
    final File extDirectory = new File(this.getRootDirectory(), HybrisConstants.PLATFORM_EXTENSIONS_DIRECTORY_NAME);
    final Set<String> platformDependencies = Sets.newHashSet();

    if (extDirectory.isDirectory()) {
        final File[] files = extDirectory.listFiles((FileFilter) DirectoryFileFilter.DIRECTORY);

        if (null != files) {
            for (File file : files) {
                platformDependencies.add(file.getName());
            }
        }
    }

    return Collections.unmodifiableSet(platformDependencies);
}
public void processResultsDirectory(String dirName) {
    File root = new File(dirName);
    try {
        Collection<File> files = FileUtils.listFiles(root, new RegexFileFilter(jmeterJTLFileName), DirectoryFileFilter.DIRECTORY);
        for (Iterator<File> iterator = files.iterator(); iterator.hasNext();) {
            File file = iterator.next();
            parse(file);
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
/**
 * Loads the internal region with the configuration in the configDirPath
 */
public void loadSharedConfigurationFromDisk() throws Exception {
    lockSharedConfiguration();
    File[] groupNames = new File(configDirPath).listFiles((FileFilter) DirectoryFileFilter.INSTANCE);
    Map<String, Configuration> sharedConfiguration = new HashMap<String, Configuration>();
    try {
        for (File groupName : groupNames) {
            Configuration configuration = readConfiguration(groupName);
            sharedConfiguration.put(groupName.getName(), configuration);
        }
        Region clusterRegion = getConfigurationRegion();
        clusterRegion.clear();
        clusterRegion.putAll(sharedConfiguration);

        // Overwrite the security settings using the locator's properties,
        // ignoring whatever is in the import
        persistSecuritySettings(clusterRegion);
    } finally {
        unlockSharedConfiguration();
    }
}
public Set<String> getResourceDomains() {
    Set<String> set = Sets.<String>newHashSet();
    File file1 = new File(this.resourcePackFile, "assets/");

    if (file1.isDirectory()) {
        for (File file2 : file1.listFiles((FileFilter) DirectoryFileFilter.DIRECTORY)) {
            String s = getRelativeName(file1, file2);

            if (s.equals(s.toLowerCase(java.util.Locale.ROOT))) {
                set.add(s.substring(0, s.length() - 1));
            } else {
                this.logNameNotLowercase(s);
            }
        }
    }

    return set;
}
public Set<String> getResourceDomains() {
    Set<String> set = Sets.<String>newHashSet();
    File file1 = new File(this.resourcePackFile, "assets/");

    if (file1.isDirectory()) {
        for (File file2 : file1.listFiles((FileFilter) DirectoryFileFilter.DIRECTORY)) {
            String s = getRelativeName(file1, file2);

            if (s.equals(s.toLowerCase())) {
                set.add(s.substring(0, s.length() - 1));
            } else {
                this.logNameNotLowercase(s);
            }
        }
    }

    return set;
}
private List<File> getLocalAlbums() {
    final String musicDirectory = AppConfiguration.getConfigurationProperty("music.dir");
    final File directory = new File(musicDirectory);
    if (!directory.exists()) {
        return Collections.emptyList();
    }
    return FileUtils.listFilesAndDirs(directory, DirectoryFileFilter.DIRECTORY, TrueFileFilter.INSTANCE)
            .stream()
            .filter(file -> !file.getAbsolutePath().equals(musicDirectory))
            .sorted(Comparator.comparing(File::getAbsolutePath))
            .collect(Collectors.toList());
}
/**
 * Get list of all existing root sub directories
 *
 * @param rootDir
 *            - the root directory to list
 * @return list of sub directories
 */
public static Collection<File> listRootDirs(final File rootDir) {
    final Collection<File> dirList = new ArrayList<File>();
    for (final File dir : FileUtils.listFilesAndDirs(rootDir, FalseFileFilter.INSTANCE, DirectoryFileFilter.INSTANCE)) {
        LOG.debug("Dir: {}", dir);
        // get only root directories
        final String name = getTemplateName(rootDir, dir);
        if (StringUtils.isNotBlank(name) && !name.contains("/")) {
            dirList.add(dir);
        }
    }
    return dirList;
}
public static List<String> findGitRepos(String dirPath) {
    File dir = new File(dirPath);
    IOFileFilter gitDirFilter = (IOFileFilter) FileFilterUtils.suffixFileFilter(".git");
    IOFileFilter notFile = FileFilterUtils.notFileFilter(TrueFileFilter.INSTANCE);
    IOFileFilter compositeFilter = FileFilterUtils.and(notFile, gitDirFilter);
    List<File> files = (List<File>) FileUtils.listFilesAndDirs(dir, compositeFilter, DirectoryFileFilter.INSTANCE);
    List<String> results = new ArrayList<String>();
    for (File f : files) {
        try {
            if (!f.getCanonicalPath().endsWith("/.git"))
                continue;
            String gitStripped = f.getCanonicalPath().replace("/.git", "");
            System.out.println(gitStripped);
            results.add(gitStripped);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    return results;
}
public List<File> getDirectoryContents(final String path) {
    final String rootPath = APPLICATION_PAGES_PATH + ("/".equals(path) ? "" : path);
    final File rootDir = new File(context.getRealPath(rootPath));
    final List<File> allDirectories = Arrays.asList(rootDir.listFiles((FileFilter) DirectoryFileFilter.DIRECTORY));
    final List<File> filteredDirectories = new ArrayList<File>();
    for (final File currentFile : allDirectories) {
        final String fullPath = path + currentFile.getName() + "/";
        if (!EXCLUDED_DIRS.contains(fullPath)) {
            filteredDirectories.add(currentFile);
        }
    }
    final List<File> files = Arrays.asList(rootDir.listFiles(CustomizedFile.Type.APPLICATION_PAGE.getFilter()));
    Collections.sort(filteredDirectories);
    Collections.sort(files);
    final List<File> filesAndDirs = new ArrayList<File>();
    filesAndDirs.addAll(filteredDirectories);
    filesAndDirs.addAll(files);
    return filesAndDirs;
}
public int moveToUpload() throws IOException {
    // put language extensions into array
    String[] extensions = new String[languages.size()];
    for (int i = 0; i < languages.size(); i++) {
        extensions[i] = languages.get(i).getExtension();
    }

    // for each project, and for each student, collect all the files with extensions in all subdirectories
    for (File project : projectFolders) {
        File[] studentFolders = new File(project.getPath()).listFiles((FilenameFilter) DirectoryFileFilter.DIRECTORY);
        for (File student : studentFolders) {
            // make a directory in the Upload folder
            String copyDirectory = uploadFolder + File.separator + prependClean(project.getName())
                    + File.separator + clean(student.getName());
            File studentDirUpload = new File(copyDirectory);

            // find all the files to copy there and copy them
            Collection<File> files = FileUtils.listFiles(student, extensions, true);
            for (File file : files) {
                FileUtils.copyFileToDirectory(file, studentDirUpload);
            }
        }
    }
    return 1;
}
/**
 * Scan a directory for packages that match. This method is used prior to
 * finding a matching directory. Once the package name is matched,
 * handleDir() is used.
 *
 * @param classes
 *            The classes that have been found.
 * @param packageName
 *            The package name for classes to find.
 * @param dir
 *            The directory to scan.
 * @param cFilter
 *            The class acceptance filter.
 */
private static void scanDir(Set<String> classes, String packageName, File dir, ClassPathFilter cFilter) {
    if (!dir.exists()) {
        return;
    }
    if (dir.isDirectory()) {
        if (dir.getPath().endsWith(packageName.replace('.', '/'))) {
            // we have a match
            handleDir(classes, packageName, dir, cFilter);
        } else {
            // no match, check the next level
            for (File file : dir.listFiles((FileFilter) new AndFileFilter(DirectoryFileFilter.DIRECTORY,
                    new NotFileFilter(new PrefixFileFilter("."))))) {
                scanDir(classes, packageName, file, cFilter);
            }
        }
    }
    // if it is not a directory we don't process it here as we are looking
    // for directories that start with the packageName.
}
private void initWebApps() throws Exception {
    File webAppsDir = new File(homeDir, "webapps");
    File[] dirs = webAppsDir.listFiles((FileFilter) DirectoryFileFilter.DIRECTORY);
    for (File dir : dirs) {
        File[] webAppFiles = dir.listFiles((FilenameFilter) new NameFileFilter("webapp.xml"));
        if (webAppFiles.length == 0) {
            continue;
        }
        File file = webAppFiles[0];
        try {
            WebApp webApp = new WebApp(webAppFiles[0]);
            webApps.put(webApp.getName(), webApp);
            webApp.open();
        } catch (Exception e) {
            logger.error(String.format("Error creating webapp \"%s\"", file.getAbsolutePath()), e);
        }
    }
}
private void cleanupIncrementalBackupDirectory(FolderInfo sourceFolder,
        List<ItemInfo> currentFolderChildren, File targetDir) {
    // Metadata file filter
    IOFileFilter metadataFilter = new MetadataFileFilter();

    // List all artifacts
    Collection<File> artifacts = Sets.newHashSet(
            targetDir.listFiles((FileFilter) new NotFileFilter(metadataFilter)));
    cleanArtifacts(currentFolderChildren, artifacts);

    // List all sub-target metadata
    Collection<File> subTargetMetadataFiles = FileUtils.listFiles(targetDir, metadataFilter,
            DirectoryFileFilter.INSTANCE);
    cleanMetadata(currentFolderChildren, subTargetMetadataFiles);

    // List all target metadata
    File targetDirMetadataContainerFolder = getMetadataContainerFolder(targetDir);
    Collection<File> targetMetadataFiles = FileUtils.listFiles(targetDirMetadataContainerFolder,
            metadataFilter, DirectoryFileFilter.INSTANCE);
    cleanTargetMetadata(sourceFolder, targetMetadataFiles);
}
public static List<String> loadPackages(MavenProject mavenProject) throws MojoExecutionException {
    List<String> packages = Lists.newArrayList();
    logger.info("Loading packages in " + mavenProject.getBuild().getSourceDirectory() + "...");
    File rootDir = new File(mavenProject.getBuild().getSourceDirectory() + "//");
    Collection<File> files = FileUtils.listFilesAndDirs(rootDir, DirectoryFileFilter.DIRECTORY, TrueFileFilter.TRUE);
    for (File file : files) {
        String pack = file.toString().replace(rootDir.toString(), "").replace(File.separator, ".");
        if (pack.startsWith(".")) {
            pack = pack.substring(1, pack.length());
        }
        if (!pack.isEmpty()) {
            packages.add(pack);
        }
    }
    return packages;
}
/**
 * @param args
 * @throws IOException
 */
public static void main(final String[] args) throws IOException {
    if (args.length != 2) {
        System.err.println("Usage: <trainDirectory> <N>");
        return;
    }

    final BinaryJavaAstTreeExtractor treeExtractor = new BinaryJavaAstTreeExtractor(
            new ParentTypeAnnotatedJavaAstExtractor());
    final ContextFreeGrammar cfg = new ContextFreeGrammar(treeExtractor);

    final Collection<File> files = FileUtils.listFiles(new File(args[0]),
            cfg.modelledFilesFilter(), DirectoryFileFilter.DIRECTORY);
    cfg.trainModel(files);

    for (int i = 0; i < Integer.parseInt(args[1]); i++) {
        final TreeNode<Integer> randomTree = cfg.generateRandom();
        final String code = treeExtractor.getCodeFromTree(randomTree);
        System.out.println(code);
        System.out.println("-----------------------------");
    }
}
/**
 * Main to create a ruleset from a set of files.
 *
 * @param args
 *            <directory> <grammarFile>
 * @throws IOException
 *             when a file is not found
 * @throws ClassNotFoundException
 *             on wrong input
 * @throws IllegalAccessException
 *             on wrong input
 * @throws InstantiationException
 *             on wrong input
 */
public static void main(final String[] args) throws IOException,
        ClassNotFoundException, InstantiationException, IllegalAccessException {
    if (args.length != 2) {
        System.err.println("Usage <directory> <grammarFile>");
        return;
    }
    try {
        final ContextFreeGrammar glm = new ContextFreeGrammar(
                new BinaryJavaAstTreeExtractor(
                        new ParentTypeAnnotatedJavaAstExtractor()));
        final Collection<File> files = FileUtils.listFiles(
                new File(args[0]), glm.modelledFilesFilter(),
                DirectoryFileFilter.DIRECTORY);
        glm.trainModel(files);
        Serializer.getSerializer().serialize(glm, args[1]);
    } catch (Exception e) {
        LOGGER.severe(ExceptionUtils.getStackTrace(e));
    }
}
/**
 * @param format
 * @param trainDirectory
 * @param likelihoodThreshold
 * @return
 */
private SortedSet<LikelihoodRatio<Integer>> loadData(
        final AbstractTreeExtractor format, final File trainDirectory,
        final double likelihoodThreshold) {
    final Collection<File> trainFiles = FileUtils.listFiles(trainDirectory,
            JavaTokenizer.javaCodeFileFilter, DirectoryFileFilter.DIRECTORY);

    for (final File f : trainFiles) {
        try {
            final TreeNode<Integer> fileAst = format.getTree(f);
            final Set<Integer> patternsIdsInFile = patternInFileId(fileAst);
            cooccurenceData.add(patternsIdsInFile);
        } catch (final Exception e) {
            LOGGER.warning("Error in file " + f + " "
                    + ExceptionUtils.getFullStackTrace(e));
        }
    }

    LOGGER.info("Patterns Loaded, building co-appearing sets...");
    // Create co-occurring set
    final SortedSet<LikelihoodRatio<Integer>> likelyCoappearingElements = cooccurenceData
            .likelyCoappearingElements(likelihoodThreshold);
    LOGGER.info("Patterns Built, filtering...");
    filterCoappearingPatterns(likelyCoappearingElements);
    return likelyCoappearingElements;
}
/**
 * Retain only the patterns that appear in the test set.
 *
 * @param format
 * @param testDirectory
 */
private void removePatternsNotInTest(final AbstractTreeExtractor format, final File testDirectory) {
    final Collection<File> testFiles = FileUtils.listFiles(testDirectory,
            JavaTokenizer.javaCodeFileFilter, DirectoryFileFilter.DIRECTORY);

    final Set<Integer> seen = Sets.newHashSet();
    for (final File f : testFiles) {
        try {
            final TreeNode<Integer> fileAst = format.getTree(f);
            final Set<Integer> patternsIdsInFile = patternInFileId(fileAst);
            seen.addAll(patternsIdsInFile);
        } catch (final Exception e) {
            LOGGER.warning("Error in file " + f + " "
                    + ExceptionUtils.getFullStackTrace(e));
        }
    }

    final Set<Integer> toRemove = Sets.difference(patternDictionary.keySet(), seen).immutableCopy();
    for (final int keyToRemove : toRemove) {
        patternDictionary.remove(keyToRemove);
    }
}
/**
 * Add all files from a corpus
 *
 * @param corpusDirectory
 */
public void addCorpus(final String corpusDirectory) {
    final Iterator<File> allFiles = FileUtils.iterateFiles(new File(corpusDirectory),
            grammar.getTreeExtractor().getTokenizer().getFileFilter(),
            DirectoryFileFilter.DIRECTORY);
    while (allFiles.hasNext()) {
        final File currentSource = allFiles.next();
        try {
            final TreeNode<Integer> tree = grammar.getTreeExtractor().getTree(currentSource);
            cfgPrior.addCFGRulesFrom(TSGNode.convertTree(tree, 0));
            addMatchingNodesToIdioms(tree);
        } catch (final IOException e) {
            LOGGER.warning("Failed to load " + currentSource + " because "
                    + ExceptionUtils.getFullStackTrace(e));
        }
    }
}
/**
 * Retain only the patterns that appear in the filter set.
 *
 * @param filterDirectory
 */
private void removePatternsNotIn(final File filterDirectory) {
    final Collection<File> filterFiles = FileUtils.listFiles(filterDirectory,
            JavaTokenizer.javaCodeFileFilter, DirectoryFileFilter.DIRECTORY);

    final Set<Integer> seen = Sets.newHashSet();
    for (final File f : filterFiles) {
        try {
            final TreeNode<Integer> fileAst = format.getTree(f);
            final Set<Integer> patternsIdsInFile = patternInFileId(fileAst);
            seen.addAll(patternsIdsInFile);
        } catch (final Exception e) {
            LOGGER.warning("Error cleaning up in file " + f + " "
                    + ExceptionUtils.getFullStackTrace(e));
        }
    }

    final Set<Integer> toRemove = Sets.difference(patternDictionary.keySet(), seen).immutableCopy();
    for (final int keyToRemove : toRemove) {
        patternDictionary.remove(keyToRemove);
    }
}
/**
 * Use the files in the trainset to train the co-occurrence weights.
 *
 * @param trainDirectory
 */
private void train(final File trainDirectory) {
    final Collection<File> testFiles = FileUtils.listFiles(trainDirectory,
            JavaTokenizer.javaCodeFileFilter, DirectoryFileFilter.DIRECTORY);
    final JavaASTExtractor ex = new JavaASTExtractor(false);
    for (final File f : testFiles) {
        try {
            final CompilationUnit ast = ex.getAST(f);
            final PackageInfoExtractor pie = new PackageInfoExtractor(ast);
            final TreeNode<Integer> fileAst = format.getTree(ast);
            final Set<Integer> patternsIdsInFile = patternInFileId(fileAst);
            final List<String> imports = pie.getImports();
            patternImportCooccurence.add(parseImports(imports), patternsIdsInFile);
        } catch (final Exception e) {
            LOGGER.warning("Error training in file " + f + " "
                    + ExceptionUtils.getFullStackTrace(e));
        }
    }
    patternImportCooccurence.prune(cooccuringPairsThreshold);
}
private void evaluateOnTest(final File testDirectory) {
    final Collection<File> allFiles = FileUtils.listFiles(testDirectory,
            JavaTokenizer.javaCodeFileFilter, DirectoryFileFilter.DIRECTORY);
    final ParallelThreadPool ptp = new ParallelThreadPool();

    for (final File f : allFiles) {
        ptp.pushTask(new Runnable() {
            @Override
            public void run() {
                try {
                    evaluateFile(f);
                } catch (final IOException e) {
                    LOGGER.warning("Error in file " + f + " "
                            + ExceptionUtils.getFullStackTrace(e));
                }
            }
        });
    }
    ptp.waitForTermination();
}
/**
 * @param format
 * @param patterns
 * @param directory
 * @return
 */
public static Set<TreeNode<Integer>> patternsSeenInCorpus(
        final AbstractJavaTreeExtractor format,
        final Set<TreeNode<Integer>> patterns, final File directory) {
    final Collection<File> allFiles = FileUtils.listFiles(directory,
            JavaTokenizer.javaCodeFileFilter, DirectoryFileFilter.DIRECTORY);

    final Set<TreeNode<Integer>> patternSeenInCorpus = Sets.newIdentityHashSet();
    for (final File f : allFiles) {
        try {
            final TreeNode<Integer> fileAst = format.getTree(f);
            getPatternsForTree(fileAst, patterns, patternSeenInCorpus);
        } catch (final IOException e) {
            PatternInSet.LOGGER.warning(ExceptionUtils.getFullStackTrace(e));
        }
    }
    return patternSeenInCorpus;
}
/**
 * Filter all patterns so that they are contained in at least one of the
 * files.
 *
 * @param directories
 * @param nSeenInFiles
 *            number of times seen in the files.
 */
public void filterFromFiles(final Collection<File> directories, final int nSeenInFiles) {
    final Multiset<TreeNode<Integer>> patternsSeen = HashMultiset.create();

    for (final File directory : directories) {
        final Collection<File> directoryFiles = FileUtils.listFiles(directory,
                JavaTokenizer.javaCodeFileFilter, DirectoryFileFilter.DIRECTORY);
        for (final File f : directoryFiles) {
            try {
                // We add the patterns once per file.
                patternsSeen.addAll(getPatternsFromTree(format.getTree(f)).elementSet());
            } catch (final IOException e) {
                LOGGER.warning(ExceptionUtils.getFullStackTrace(e));
            }
        }
    }

    // patternsSeen now contains the number of files in which each pattern has been seen.
    final Set<TreeNode<Integer>> toKeep = CollectionUtil.getElementsUpToCount(nSeenInFiles, patternsSeen);
    patterns.retainAll(toKeep);
}
public PatternStatsCalculator(final AbstractJavaTreeExtractor treeFormat,
        final Set<TreeNode<Integer>> patterns, final File directory) {
    this.treeFormat = treeFormat;
    this.patterns = HashMultiset.create(patterns);
    int currentIdx = 0;
    for (final Multiset.Entry<TreeNode<Integer>> rule : this.patterns.entrySet()) {
        patternDictionary.put(rule.getElement(), currentIdx);
        patternSizes.put(currentIdx, rule.getElement().getTreeSize());
        currentIdx++;
    }
    allFiles = FileUtils.listFiles(directory, JavaTokenizer.javaCodeFileFilter,
            DirectoryFileFilter.DIRECTORY);
    fileSizes = new MapMaker().concurrencyLevel(ParallelThreadPool.NUM_THREADS)
            .initialCapacity(allFiles.size()).makeMap();
    filePatterns = HashBasedTable.create(allFiles.size(), patterns.size() / 10);
    filePatternsCount = HashBasedTable.create(allFiles.size(), patterns.size() / 1);
}
/**
 * @param args
 * @throws IOException
 * @throws ClassNotFoundException
 * @throws SerializationException
 */
public static void main(String[] args) throws ClassNotFoundException, IOException, SerializationException {
    if (args.length != 4) {
        System.err.println("Usage <fullNGram> <typeNGram> <ParamCalibrationFiles> <output>");
        return;
    }
    final AbstractNGramLM fullNGram = ((AbstractNGramLM) Serializer
            .getSerializer().deserializeFrom(args[0]));
    final AbstractNGramLM typeNGram = ((AbstractNGramLM) Serializer
            .getSerializer().deserializeFrom(args[1]));
    final Collection<File> testFiles = FileUtils.listFiles(new File(args[2]),
            fullNGram.modelledFilesFilter(), DirectoryFileFilter.DIRECTORY);
    final IdentifierOnlyCachedNGramLM cachedLM = new IdentifierOnlyCachedNGramLM(
            fullNGram, typeNGram, fullNGram.getTokenizer().getIdentifierType(), testFiles);
    Serializer.getSerializer().serialize(cachedLM, args[3]);
}
public static void main(String[] args) throws ClassNotFoundException, IOException, SerializationException {
    if (args.length != 3) {
        System.err.println("Usage <fullNGram> <paramTuningFolder> <output>");
        return;
    }
    final AbstractNGramLM fullNGram = ((AbstractNGramLM) Serializer
            .getSerializer().deserializeFrom(args[0]));
    final Collection<File> testFiles = FileUtils.listFiles(new File(args[1]),
            fullNGram.modelledFilesFilter(), DirectoryFileFilter.DIRECTORY);
    final SimpleCachedNGramLM cachedLM = new SimpleCachedNGramLM(fullNGram, testFiles);
    Serializer.getSerializer().serialize(cachedLM, args[2]);
}
private double evaluateNumJunkVars() {
    final Collection<File> allFiles = FileUtils.listFiles(tmpDir,
            tokenizer.getFileFilter(), DirectoryFileFilter.DIRECTORY);
    final ParallelThreadPool ptp = new ParallelThreadPool();
    final JunkPercentage jp = new JunkPercentage();
    for (final File testFile : allFiles) {
        ptp.pushTask(new JunkRenamingRunnable(allFiles, testFile, jp));
    }
    ptp.waitForTermination();
    LOGGER.info("accJunk = " + ((double) jp.nJunkInTotal) / jp.totalVariables);
    return ((double) jp.nJunkInTotal) / jp.totalVariables;
}
public static SortedSet<Suggestion> getVariableSuggestions(
        final File currentFile, final File directory, final boolean useUNK) throws IOException {
    final ITokenizer tokenizer = new JavaTokenizer();
    final AbstractIdentifierRenamings renamer = new BaseIdentifierRenamings(tokenizer);

    final Collection<java.io.File> trainingFiles = FileUtils.listFiles(directory,
            tokenizer.getFileFilter(), DirectoryFileFilter.DIRECTORY);
    trainingFiles.remove(currentFile);

    renamer.buildRenamingModel(trainingFiles);

    final IScopeExtractor scopeExtractor = new VariableScopeExtractor.VariableScopeSnippetExtractor();
    final SegmentRenamingSuggestion suggestion = new SegmentRenamingSuggestion(
            renamer, scopeExtractor, useUNK);
    return suggestion.rankSuggestions(currentFile);
}
private void doFirstScan(final File repositoryDir, final String sha) {
    for (final File f : FileUtils.listFiles(repositoryDir,
            JavaTokenizer.javaCodeFileFilter, DirectoryFileFilter.DIRECTORY)) {
        final String fileInRepo = f.getAbsolutePath().substring(
                (int) (repositoryDir.getAbsolutePath().length() + 1));
        Set<IdentifierInformation> identiferInfos;
        try {
            identiferInfos = infoScanner.scanFile(f, sha);
            identiferInfos.forEach(info -> {
                final IdentifierInformationThroughTime iitt = new IdentifierInformationThroughTime();
                iitt.addInformation(info);
                currentStateOfIdentifiers.put(fileInRepo, iitt);
            });
        } catch (final IOException e) {
            LOGGER.severe("Could not find file " + f + "\n" + ExceptionUtils.getFullStackTrace(e));
        }
    }
}
/**
 * @param args
 */
public static void main(final String[] args) {
    if (args.length != 1) {
        System.err.println("Usage <codeFolder>");
        System.exit(-1);
    }
    final File directory = new File(args[0]);
    final Collection<File> allFiles = FileUtils.listFiles(directory,
            JavaTokenizer.javaCodeFileFilter, DirectoryFileFilter.DIRECTORY);
    final JavaTypeHierarchyExtractor jthe = new JavaTypeHierarchyExtractor();
    jthe.addFilesToCorpus(allFiles);
    System.out.println(jthe);
}
/**
 * Extract the bindings from the input folder to the output file, using the
 * bindingExtractor.
 *
 * @param inputFolder
 * @param outputFile
 * @param bindingExtractor
 * @throws IOException
 * @throws JsonIOException
 */
public static void extractBindings(final File inputFolder, final File outputFile,
        final AbstractJavaNameBindingsExtractor bindingExtractor) throws IOException, JsonIOException {
    final Collection<File> allFiles = FileUtils.listFiles(inputFolder,
            JavaTokenizer.javaCodeFileFilter, DirectoryFileFilter.DIRECTORY);

    final List<SerializableResolvedSourceCode> resolvedCode = allFiles
            .parallelStream()
            .map(f -> getResolvedCode(f, bindingExtractor))
            .filter(r -> r != null)
            .map(r -> SerializableResolvedSourceCode.fromResolvedSourceCode(r))
            .filter(s -> !s.boundVariables.isEmpty())
            .collect(Collectors.toList());

    final FileWriter writer = new FileWriter(outputFile);
    try {
        final Gson gson = new Gson();
        gson.toJson(resolvedCode, writer);
    } finally {
        writer.close();
    }
}
/**
 * @param args
 * @throws ClassNotFoundException
 * @throws IllegalAccessException
 * @throws InstantiationException
 */
public static void main(final String[] args)
        throws InstantiationException, IllegalAccessException, ClassNotFoundException {
    if (args.length != 2) {
        System.err.println("Usage: <directory> <tokenizerClass>");
        return;
    }

    final DistinctTokenCount tokCount = new DistinctTokenCount(args[1]);
    for (final File fi : FileUtils.listFiles(new File(args[0]),
            new RegexFileFilter(".*\\.java$"), DirectoryFileFilter.DIRECTORY)) {
        try {
            tokCount.addTokens(fi);
        } catch (final IOException e) {
            LOGGER.warning(ExceptionUtils.getFullStackTrace(e));
        }
    }
    tokCount.printCounts();
}
/**
 * Return collection of all files in directory and sub-directories, ignoring any that
 * have been specifically excluded in plugin configuration.
 */
@SuppressWarnings("rawtypes")
private Collection<File> getAllFiles(final File directory, final List<File> filesToIgnore) {
    if (!directory.exists()) {
        log.warn("Directory does not exist: " + directory.getPath());
        return EMPTY_FILE_LIST;
    }

    final Collection allFiles = FileUtils.listFiles(directory, TrueFileFilter.TRUE, DirectoryFileFilter.DIRECTORY);
    final Collection<File> files = new ArrayList<File>(allFiles.size());
    for (final Object o : allFiles) {
        if (o != null && o instanceof File) {
            final File file = (File) o;
            if (isFileToIgnore(file, filesToIgnore)) {
                log.debug("Ignoring : " + file.toString());
            } else {
                log.debug("Adding file: " + file.toString());
                files.add(file);
            }
        } else {
            log.warn("Not a file: " + ToStringBuilder.reflectionToString(o));
        }
    }
    return files;
}
public static Collection<File> getLeafDirectories( File repoPath )
{
    // Using commons-io; if performance becomes a problem it might be worth looking at the Java 8 streams API,
    // e.g. http://blog.jooq.org/2014/01/24/java-8-friday-goodies-the-new-new-io-apis/
    // not yet though..
    Collection<File> subDirectories =
            FileUtils.listFilesAndDirs( repoPath, (IOFileFilter) DirectoryFileFilter.DIRECTORY, TrueFileFilter.INSTANCE );

    Collection<File> leafDirectories = new ArrayList<File>();
    for ( File subDirectory : subDirectories )
    {
        if ( isLeafVersionDirectory( subDirectory ) && subDirectory != repoPath )
        {
            leafDirectories.add( subDirectory );
        }
    }
    return leafDirectories;
}
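The comment in the method above points at the java.nio.file streams API as a possible replacement for the commons-io walk. A minimal sketch of that alternative, assuming the same isLeafVersionDirectory helper is available (the method name getLeafDirectoriesNio is hypothetical, not part of the original code):

public static Collection<File> getLeafDirectoriesNio( File repoPath ) throws IOException
{
    // Walk the tree with java.nio.file instead of commons-io and keep only leaf version directories.
    try ( java.util.stream.Stream<java.nio.file.Path> paths = java.nio.file.Files.walk( repoPath.toPath() ) )
    {
        return paths
                .filter( java.nio.file.Files::isDirectory )
                .map( java.nio.file.Path::toFile )
                .filter( dir -> isLeafVersionDirectory( dir ) && !dir.equals( repoPath ) )
                .collect( java.util.stream.Collectors.toList() );
    }
}

Whether this is actually faster depends on the repository layout; the try-with-resources block is needed because Files.walk holds directory handles open until the stream is closed.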