ContainingBranchesGetter(@NotNull VcsLogDataHolder dataHolder, @NotNull Disposable parentDisposable) {
  myDataHolder = dataHolder;
  myTaskExecutor = new SequentialLimitedLifoExecutor<Task>(parentDisposable, 10, new ThrowableConsumer<Task, Throwable>() {
    @Override
    public void consume(final Task task) throws Throwable {
      final List<String> branches = task.getContainingBranches(myDataHolder);
      ApplicationManager.getApplication().invokeLater(new Runnable() {
        @Override
        public void run() {
          // if cache is cleared (because of log refresh) during this task execution,
          // this will put obsolete value into the old instance we don't care anymore
          task.cache.put(task.hash, branches);
          notifyListener();
        }
      });
    }
  });
}
AbstractDataGetter(@NotNull VcsLogHashMap hashMap,
                   @NotNull Map<VirtualFile, VcsLogProvider> logProviders,
                   @NotNull VcsCommitCache<Integer, T> cache,
                   @NotNull Disposable parentDisposable) {
  myHashMap = hashMap;
  myLogProviders = logProviders;
  myCache = cache;
  Disposer.register(parentDisposable, this);
  myLoader = new SequentialLimitedLifoExecutor<TaskDescriptor>(this, MAX_LOADING_TASKS, new ThrowableConsumer<TaskDescriptor, VcsException>() {
    @Override
    public void consume(TaskDescriptor task) throws VcsException {
      preLoadCommitData(task.myCommits);
      UIUtil.invokeAndWaitIfNeeded(new Runnable() {
        @Override
        public void run() {
          for (Runnable loadingFinishedListener : myLoadingFinishedListeners) {
            loadingFinishedListener.run();
          }
        }
      });
    }
  });
}
@Override
public void deleteFile(Object requestor, @NotNull final VirtualFile file) throws IOException {
  if (file.getParent() == null) {
    throw new IOException(VfsBundle.message("cannot.delete.root.directory", file.getPath()));
  }
  if (!auxDelete(file)) {
    File ioFile = convertToIOFile(file);
    if (!FileUtil.delete(ioFile)) {
      throw new IOException(VfsBundle.message("delete.failed.error", ioFile.getPath()));
    }
  }
  auxNotifyCompleted(new ThrowableConsumer<LocalFileOperationsHandler, IOException>() {
    @Override
    public void consume(LocalFileOperationsHandler handler) throws IOException {
      handler.delete(file);
    }
  });
}
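// Several snippets in this listing hand a ThrowableConsumer<LocalFileOperationsHandler, IOException> to
// auxNotifyCompleted(...), whose body is not shown here. A minimal sketch of what such a dispatcher could look
// like is given below, inferred from the "afterDone" proxy snippet later in this listing; the myHandlers field
// is an assumption for illustration.
private void auxNotifyCompleted(final ThrowableConsumer<LocalFileOperationsHandler, IOException> consumer) {
  for (LocalFileOperationsHandler handler : myHandlers) {
    handler.afterDone(consumer); // each registered handler decides when to run the completion callback
  }
}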
@Nullable
@Override
public CodeStyleScheme importScheme(@NotNull Project project,
                                    @NotNull VirtualFile selectedFile,
                                    CodeStyleScheme currentScheme,
                                    SchemeFactory<CodeStyleScheme> schemeFactory) throws SchemeImportException {
  final String[] schemeNames = readSchemeNames(selectedFile);
  final ImportSchemeChooserDialog schemeChooserDialog =
    new ImportSchemeChooserDialog(project, schemeNames, !currentScheme.isDefault() ? currentScheme.getName() : null);
  if (!schemeChooserDialog.showAndGet()) return null;
  final CodeStyleScheme scheme = schemeChooserDialog.isUseCurrentScheme() && !currentScheme.isDefault()
                                 ? currentScheme
                                 : schemeFactory.createNewScheme(schemeChooserDialog.getTargetName());
  if (scheme == null) return null;
  readFromStream(selectedFile, new ThrowableConsumer<InputStream, SchemeImportException>() {
    @Override
    public void consume(InputStream stream) throws SchemeImportException {
      new EclipseCodeStyleImportWorker().importScheme(stream, schemeChooserDialog.getSelectedName(), scheme);
    }
  });
  return scheme;
}
/**
 * Attempts to read scheme names from the given file. The file may contain several schemes, in which case all the available
 * names are returned.
 *
 * @param selectedFile The file to read the scheme names from.
 * @return The scheme names found in the file (possibly empty, never null).
 * @throws SchemeImportException If the file can't be read or parsed.
 */
@NotNull
private String[] readSchemeNames(@NotNull VirtualFile selectedFile) throws SchemeImportException {
  final Set<String> names = new HashSet<String>();
  final EclipseXmlProfileReader reader = new EclipseXmlProfileReader(new EclipseXmlProfileReader.OptionHandler() {
    @Override
    public void handleOption(@NotNull String eclipseKey, @NotNull String value) throws SchemeImportException {
      // Ignore
    }

    @Override
    public void handleName(String name) {
      names.add(name);
    }
  });
  readFromStream(selectedFile, new ThrowableConsumer<InputStream, SchemeImportException>() {
    @Override
    public void consume(InputStream stream) throws SchemeImportException {
      reader.readSettings(stream);
    }
  });
  return ArrayUtil.toStringArray(names);
}
public CachingSvnRepositoryPool(ThrowableConvertor<SVNURL, SVNRepository, SVNException> creator,
                                final int maxCached,
                                final int maxConcurrent,
                                ThrowableConsumer<Pair<SVNURL, SVNRepository>, SVNException> adjuster,
                                final ApplicationLevelNumberConnectionsGuard guard) {
  myGuard = guard;
  myLock = new Object();
  myConnectionTimeout = DEFAULT_IDLE_TIMEOUT;
  myCreator = creator;
  myAdjuster = adjuster;
  myMaxCached = maxCached > 0 ? maxCached : ourMaxCachedDefault;
  myMaxConcurrent = maxConcurrent > 0 ? maxConcurrent : ourMaxConcurrentDefault;
  if (myMaxConcurrent < myMaxCached) {
    myMaxConcurrent = myMaxCached;
  }
  myGroups = new HashMap<String, RepoGroup>();
  myDisposed = false;
}
private RepoGroup(ThrowableConvertor<SVNURL, SVNRepository, SVNException> creator,
                  int cached,
                  int concurrent,
                  final ThrowableConsumer<Pair<SVNURL, SVNRepository>, SVNException> adjuster,
                  final ApplicationLevelNumberConnectionsGuard guard,
                  final Object waitObj,
                  final long connectionTimeout) {
  myCreator = creator;
  myMaxCached = cached;
  myMaxConcurrent = concurrent;
  myAdjuster = adjuster;
  myGuard = guard;
  myConnectionTimeout = connectionTimeout;
  myInactive = new TreeMap<Long, SVNRepository>();
  myUsed = new HashSet<SVNRepository>();
  myDisposed = false;
  myWait = waitObj;
}
private void loadBackwards(SVNURL svnurl) throws SVNException, VcsException {
  // this method is called when svnurl does not exist in the latest repository revision - thus a concrete old revision
  // is used for the "info" command to get the repository url
  Info info = myVcs.getInfo(svnurl, myPeg, myPeg);
  final SVNURL rootURL = info != null ? info.getRepositoryRootURL() : null;
  final String root = rootURL != null ? rootURL.toString() : "";
  String relativeUrl = myUrl;
  if (myUrl.startsWith(root)) {
    relativeUrl = myUrl.substring(root.length());
  }
  final RepositoryLogEntryHandler repositoryLogEntryHandler =
    new RepositoryLogEntryHandler(myVcs, myUrl, SVNRevision.UNDEFINED, relativeUrl,
                                  new ThrowableConsumer<VcsFileRevision, SVNException>() {
                                    @Override
                                    public void consume(VcsFileRevision revision) throws SVNException {
                                      myConsumer.consume(revision);
                                    }
                                  }, rootURL);
  repositoryLogEntryHandler.setThrowCancelOnMeetPathCreation(true);

  SvnTarget target = SvnTarget.fromURL(rootURL, myFrom);
  myVcs.getFactory(target).createHistoryClient()
    .doLog(target, myFrom, myTo == null ? SVNRevision.create(1) : myTo, false, true, myShowMergeSources && mySupport15, 1, null,
           repositoryLogEntryHandler);
}
private <T, U> U execute(final OperationOnList<T, U> operation,
                         final Object projectOrComponent,
                         final List<T> items,
                         final String progressTitle) throws TfsException {
  if (items.isEmpty()) {
    return operation.merge(Collections.<U>emptyList());
  }

  final Collection<U> results = new ArrayList<U>();
  TfsUtil.consumeInParts(items, ITEMS_IN_GROUP, new ThrowableConsumer<List<T>, TfsException>() {
    public void consume(final List<T> ts) throws TfsException {
      U result = TfsRequestManager.executeRequest(myServerUri, projectOrComponent, new TfsRequestManager.Request<U>(progressTitle) {
        @Override
        public U execute(Credentials credentials, URI serverUri, @Nullable ProgressIndicator pi) throws Exception {
          return operation.execute(ts, credentials, pi);
        }
      });
      results.add(result);
    }
  });
  return operation.merge(results);
}
public static boolean doActionOnSuffixFile(VirtualFile parentFile, ThrowableConsumer<VirtualFile, IOException> consumer, String suffix) {
  VirtualFile parent = parentFile.getParent();
  if (parent == null) {
    return false;
  }

  VirtualFile metaFile = parent.findChild(parentFile.getName() + suffix);
  if (metaFile == null) {
    return false;
  }

  try {
    consumer.consume(metaFile);
    return true; // report that the companion file was found and the action ran
  }
  catch (IOException e) {
    LOGGER.error(e);
    return false;
  }
}
@Override
@NotNull
public VirtualFile createChildDirectory(final Object requestor, @NotNull final VirtualFile parent, @NotNull final String dir)
  throws IOException {
  final File ioDir = new File(convertToIOFile(parent), dir);
  final boolean succeed = auxCreateDirectory(parent, dir) || ioDir.mkdirs();
  auxNotifyCompleted(new ThrowableConsumer<LocalFileOperationsHandler, IOException>() {
    @Override
    public void consume(LocalFileOperationsHandler handler) throws IOException {
      handler.createDirectory(parent, dir);
    }
  });
  if (!succeed) {
    throw new IOException("Failed to create directory: " + ioDir.getPath());
  }
  return new FakeVirtualFile(parent, dir);
}
@Override
public VirtualFile createChildFile(final Object requestor, @NotNull final VirtualFile parent, @NotNull final String file)
  throws IOException {
  final File ioFile = new File(convertToIOFile(parent), file);
  final boolean succeed = auxCreateFile(parent, file) || FileUtil.createIfDoesntExist(ioFile);
  auxNotifyCompleted(new ThrowableConsumer<LocalFileOperationsHandler, IOException>() {
    @Override
    public void consume(LocalFileOperationsHandler handler) throws IOException {
      handler.createFile(parent, file);
    }
  });
  if (!succeed) {
    throw new IOException("Failed to create child file at " + ioFile.getPath());
  }
  return new FakeVirtualFile(parent, file);
}
@Override
public void moveFile(final Object requestor, @NotNull final VirtualFile file, @NotNull final VirtualFile newParent) throws IOException {
  if (!auxMove(file, newParent)) {
    final File ioFrom = convertToIOFile(file);
    final File ioParent = convertToIOFile(newParent);
    if (!ioParent.isDirectory()) {
      throw new IOException("Target '" + ioParent + "' is not a directory");
    }
    if (!ioFrom.renameTo(new File(ioParent, file.getName()))) {
      throw new IOException("Move failed: '" + file.getPath() + "' to '" + newParent.getPath() + "'");
    }
  }
  auxNotifyCompleted(new ThrowableConsumer<LocalFileOperationsHandler, IOException>() {
    @Override
    public void consume(LocalFileOperationsHandler handler) throws IOException {
      handler.move(file, newParent);
    }
  });
}
public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
  if ("afterDone".equals(method.getName()) && args.length == 1) {
    ((ThrowableConsumer<LocalFileOperationsHandler, IOException>)args[0]).consume(myParentProxy);
    return null;
  }
  if (LocalFileOperationsHandler.class.equals(method.getDeclaringClass())) {
    myParent.register(method, args);
  }
  if ("equals".equals(method.getName())) {
    return args[0].equals(this);
  }
  else if ("hashCode".equals(method.getName())) {
    return 1;
  }
  return method.invoke(myDelegate, args);
}
private void loadBackwards(SVNURL svnurl) throws SVNException, VcsException {
  final SVNURL rootURL = getRepositoryRoot(svnurl, myFrom);
  final String root = rootURL.toString();
  String relativeUrl = myUrl;
  if (myUrl.startsWith(root)) {
    relativeUrl = myUrl.substring(root.length());
  }
  SVNLogClient client = myVcs.createLogClient();
  final RepositoryLogEntryHandler repositoryLogEntryHandler =
    new RepositoryLogEntryHandler(myVcs, myUrl, SVNRevision.UNDEFINED, relativeUrl,
                                  new ThrowableConsumer<VcsFileRevision, SVNException>() {
                                    @Override
                                    public void consume(VcsFileRevision revision) throws SVNException {
                                      myConsumer.consume(revision);
                                      throw new SVNCancelException(); // load only one revision
                                    }
                                  }, rootURL);
  repositoryLogEntryHandler.setThrowCancelOnMeetPathCreation(true);
  client.doLog(rootURL, new String[]{}, myFrom, myFrom, myTo == null ? SVNRevision.create(1) : myTo, false, true,
               myShowMergeSources && mySupport15, 0, null, repositoryLogEntryHandler);
}
public void initialize() {
  final StopWatch initSw = StopWatch.start("initialize");
  myDataLoaderQueue.clear();

  runInBackground(new ThrowableConsumer<ProgressIndicator, VcsException>() {
    @Override
    public void consume(ProgressIndicator indicator) throws VcsException {
      resetState();
      readCurrentUser();
      DataPack dataPack = myRefresher.readFirstBlock();
      myDataPackUpdateHandler.consume(dataPack);
      initSw.report();
    }
  }, "Loading History...");
}
private void runInBackground(final ThrowableConsumer<ProgressIndicator, VcsException> task, final String title) {
  myDataLoaderQueue.run(new Task.Backgroundable(myProject, title, false) {
    @Override
    public void run(@NotNull ProgressIndicator indicator) {
      indicator.setIndeterminate(true);
      try {
        task.consume(indicator);
      }
      catch (VcsException e) {
        throw new RuntimeException(e); // TODO
      }
    }
  });
}
public SequentialLimitedLifoExecutor(Disposable parentDisposable, int maxTasks,
                                     @NotNull ThrowableConsumer<Task, ? extends Throwable> loadProcess) {
  myMaxTasks = maxTasks;
  myLoadProcess = loadProcess;
  myLoader = new QueueProcessor<Task>(new DetailsLoadingTask());
  Disposer.register(parentDisposable, this);
}
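// A minimal sketch of how a caller could wire up this executor, mirroring the ContainingBranchesGetter snippet
// above. The Task type, its run() method and the queue(...) call are assumptions for illustration; only the
// ThrowableConsumer-based constructor comes from this listing.
private SequentialLimitedLifoExecutor<Task> createExecutor(Disposable parentDisposable) {
  return new SequentialLimitedLifoExecutor<Task>(parentDisposable, 10, new ThrowableConsumer<Task, Throwable>() {
    @Override
    public void consume(Task task) throws Throwable {
      task.run(); // hypothetical: the expensive work one queued Task represents
    }
  });
  // later: executor.queue(task); — assumed enqueue call; the newest queued tasks are served first
}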
@Override
@NotNull
public VirtualFile createChildDirectory(Object requestor, @NotNull final VirtualFile parent, @NotNull final String dir) throws IOException {
  if (!VirtualFile.isValidName(dir)) {
    throw new IOException(VfsBundle.message("directory.invalid.name.error", dir));
  }

  if (!parent.exists() || !parent.isDirectory()) {
    throw new IOException(VfsBundle.message("vfs.target.not.directory.error", parent.getPath()));
  }
  if (parent.findChild(dir) != null) {
    throw new IOException(VfsBundle.message("vfs.target.already.exists.error", parent.getPath() + "/" + dir));
  }

  File ioParent = convertToIOFile(parent);
  if (!ioParent.isDirectory()) {
    throw new IOException(VfsBundle.message("target.not.directory.error", ioParent.getPath()));
  }

  if (!auxCreateDirectory(parent, dir)) {
    File ioDir = new File(ioParent, dir);
    if (!(ioDir.mkdirs() || ioDir.isDirectory())) {
      throw new IOException(VfsBundle.message("new.directory.failed.error", ioDir.getPath()));
    }
  }

  auxNotifyCompleted(new ThrowableConsumer<LocalFileOperationsHandler, IOException>() {
    @Override
    public void consume(LocalFileOperationsHandler handler) throws IOException {
      handler.createDirectory(parent, dir);
    }
  });

  return new FakeVirtualFile(parent, dir);
}
@NotNull
@Override
public VirtualFile createChildFile(Object requestor, @NotNull final VirtualFile parent, @NotNull final String file) throws IOException {
  if (!VirtualFile.isValidName(file)) {
    throw new IOException(VfsBundle.message("file.invalid.name.error", file));
  }

  if (!parent.exists() || !parent.isDirectory()) {
    throw new IOException(VfsBundle.message("vfs.target.not.directory.error", parent.getPath()));
  }
  if (parent.findChild(file) != null) {
    throw new IOException(VfsBundle.message("vfs.target.already.exists.error", parent.getPath() + "/" + file));
  }

  File ioParent = convertToIOFile(parent);
  if (!ioParent.isDirectory()) {
    throw new IOException(VfsBundle.message("target.not.directory.error", ioParent.getPath()));
  }

  if (!auxCreateFile(parent, file)) {
    File ioFile = new File(ioParent, file);
    if (!FileUtil.createIfDoesntExist(ioFile)) {
      throw new IOException(VfsBundle.message("new.file.failed.error", ioFile.getPath()));
    }
  }

  auxNotifyCompleted(new ThrowableConsumer<LocalFileOperationsHandler, IOException>() {
    @Override
    public void consume(LocalFileOperationsHandler handler) throws IOException {
      handler.createFile(parent, file);
    }
  });

  return new FakeVirtualFile(parent, file);
}
public VcsSqliteLayer(final Project project, KnownRepositoryLocations locations) {
  myKnownRepositoryLocations = locations;
  myConnection = new CacheJdbcConnection(DbSettings.getDbFilePath(project), new ThrowableConsumer<Connection, VcsException>() {
    @Override
    public void consume(Connection connection) throws VcsException {
      initDb(connection);
    }
  });
}
private void updateRenderer(final boolean updateProperties) {
  if (myConfiguration == null) {
    return;
  }
  if (myRootComponent == null) {
    reparseFile();
    return;
  }
  createRenderer(new ThrowableConsumer<RenderResult, Throwable>() {
    @Override
    public void consume(RenderResult result) throws Throwable {
      RenderSession session = result.getSession();
      if (session == null || session.getImage() == null) {
        return;
      }
      updateDeviceFrameVisibility(result);

      myRootComponent = RadModelBuilder.update(AndroidDesignerEditorPanel.this, result, (RadViewComponent)myRootComponent, myRootView);
      myRootView.setRenderedImage(result.getImage());

      zoomToFitIfNecessary();

      myLayeredPane.revalidate();
      myHorizontalCaption.update();
      myVerticalCaption.update();

      DesignerToolWindow toolWindow = getToolWindow();
      if (toolWindow != null) {
        toolWindow.refresh(updateProperties);
      }

      if (RenderPreviewMode.getCurrent() != RenderPreviewMode.NONE) {
        RenderPreviewManager previewManager = getPreviewManager(true);
        if (previewManager != null) {
          previewManager.renderPreviews();
        }
      }
    }
  });
}
private static ThrowableConsumer<VcsFileRevision, SVNException> createConsumerAdapter(final Consumer<VcsFileRevision> consumer) {
  return new ThrowableConsumer<VcsFileRevision, SVNException>() {
    @Override
    public void consume(VcsFileRevision revision) throws SVNException {
      consumer.consume(revision);
    }
  };
}
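// The adapter above bridges a plain Consumer into a ThrowableConsumer for one concrete type pair. A generic
// variant, sketched here purely for illustration (it is not part of the SVN plugin), makes the same bridging
// reusable for any payload and exception type:
static <T, E extends Throwable> ThrowableConsumer<T, E> asThrowableConsumer(final Consumer<T> consumer) {
  return new ThrowableConsumer<T, E>() {
    @Override
    public void consume(T t) throws E {
      consumer.consume(t); // the wrapped consumer never actually throws E
    }
  };
}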
public RepositoryLogEntryHandler(final SvnVcs vcs,
                                final String url,
                                final SVNRevision pegRevision,
                                String lastPath,
                                final ThrowableConsumer<VcsFileRevision, SVNException> result,
                                SVNURL repoRootURL) throws VcsException, SVNException {
  super(vcs, url, pegRevision, lastPath, result, repoRootURL, null);
}
public void getCommittedChangesWithMergedRevisons(@NotNull ChangeBrowserSettings settings,
                                                  @NotNull RepositoryLocation location,
                                                  int maxCount,
                                                  @NotNull final PairConsumer<SvnChangeList, LogHierarchyNode> finalConsumer)
  throws VcsException {
  final SvnRepositoryLocation svnLocation = (SvnRepositoryLocation)location;
  final String repositoryRoot = getRepositoryRoot(svnLocation);

  final MergeSourceHierarchyBuilder builder = new MergeSourceHierarchyBuilder(new Consumer<LogHierarchyNode>() {
    public void consume(LogHierarchyNode node) {
      finalConsumer.consume(new SvnChangeList(myVcs, svnLocation, node.getMe(), repositoryRoot), node);
    }
  });
  final SvnMergeSourceTracker mergeSourceTracker = new SvnMergeSourceTracker(new ThrowableConsumer<Pair<LogEntry, Integer>, SVNException>() {
    public void consume(Pair<LogEntry, Integer> svnLogEntryIntegerPair) throws SVNException {
      builder.consume(svnLogEntryIntegerPair);
    }
  });

  getCommittedChangesImpl(settings, SvnTarget.fromURL(svnLocation.toSvnUrl()), maxCount, new Consumer<LogEntry>() {
    public void consume(final LogEntry svnLogEntry) {
      try {
        mergeSourceTracker.consume(svnLogEntry);
      }
      catch (SVNException e) {
        throw new RuntimeException(e); // should not actually occur, but never swallow it
      }
    }
  }, true, false);

  builder.finish();
}
public static <T, E extends Throwable> void consumeInParts(List<T> items, int maxPartSize, ThrowableConsumer<List<T>, E> consumer)
  throws E {
  for (int group = 0; group <= items.size() / maxPartSize; group++) {
    List<T> subList = items.subList(group * maxPartSize, Math.min((group + 1) * maxPartSize, items.size()));
    if (!subList.isEmpty()) {
      consumer.consume(subList);
    }
  }
}
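// A minimal usage sketch of consumeInParts: splitting a large list into bounded batches so that each server
// request stays small, in the spirit of the execute(...) snippet earlier in this listing. The sendBatch(...)
// helper and the batch size of 200 are assumptions for illustration.
private void sendInBatches(List<String> itemPaths) throws TfsException {
  TfsUtil.consumeInParts(itemPaths, 200, new ThrowableConsumer<List<String>, TfsException>() {
    public void consume(List<String> batch) throws TfsException {
      sendBatch(batch); // hypothetical helper that issues one request per batch
    }
  });
}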
private static boolean doActionOnMetaFile(VirtualFile parentFile, ThrowableConsumer<VirtualFile, IOException> consumer) {
  if (parentFile.getFileType() == Unity3dMetaFileType.INSTANCE) {
    return false;
  }
  return doActionOnSuffixFile(parentFile, consumer, ourMetaSuffix);
}
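// A possible caller of doActionOnMetaFile: when a Unity asset file is removed, remove its companion ".meta" file
// too. This is only a sketch of the pattern; the surrounding listener and the null requestor are assumptions.
private static void deleteCompanionMetaFile(VirtualFile deletedFile) {
  doActionOnMetaFile(deletedFile, new ThrowableConsumer<VirtualFile, IOException>() {
    @Override
    public void consume(VirtualFile metaFile) throws IOException {
      metaFile.delete(null); // null requestor for the sketch; a real caller would pass itself
    }
  });
}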
@Override
public void deleteFile(final Object requestor, @NotNull final VirtualFile file) throws IOException {
  if (!auxDelete(file)) {
    delete(convertToIOFile(file));
  }
  auxNotifyCompleted(new ThrowableConsumer<LocalFileOperationsHandler, IOException>() {
    @Override
    public void consume(LocalFileOperationsHandler handler) throws IOException {
      handler.delete(file);
    }
  });
}
@Nullable
private static GithubInfo loadGithubInfoWithModal(@NotNull final Project project) {
  final Ref<GithubInfo> githubInfoRef = new Ref<GithubInfo>();
  final Ref<IOException> exceptionRef = new Ref<IOException>();
  ProgressManager.getInstance().run(new Task.Modal(project, "Access to GitHub", true) {
    public void run(@NotNull ProgressIndicator indicator) {
      try {
        // get existing github repos (network) and validate auth data
        final Ref<List<GithubRepo>> availableReposRef = new Ref<List<GithubRepo>>();
        final GithubAuthData auth = GithubUtil.runAndGetValidAuth(project, indicator, new ThrowableConsumer<GithubAuthData, IOException>() {
          @Override
          public void consume(GithubAuthData authData) throws IOException {
            availableReposRef.set(GithubApiUtil.getAvailableRepos(authData));
          }
        });
        final HashSet<String> names = new HashSet<String>();
        for (GithubRepo info : availableReposRef.get()) {
          names.add(info.getName());
        }

        // check access to private repos (network)
        final GithubUserDetailed userInfo = GithubApiUtil.getCurrentUserDetailed(auth);

        githubInfoRef.set(new GithubInfo(auth, userInfo, names));
      }
      catch (IOException e) {
        exceptionRef.set(e);
      }
    }
  });
  if (!exceptionRef.isNull()) {
    if (exceptionRef.get() instanceof GithubAuthenticationCanceledException) {
      return null;
    }
    GithubNotifications.showErrorDialog(project, "Failed to connect to GitHub", exceptionRef.get());
    return null;
  }
  return githubInfoRef.get();
}
@Nullable
private static GithubInfo loadGithubInfoWithModal(@NotNull final Project project, @NotNull final GithubFullPath userAndRepo) {
  final Ref<GithubInfo> githubInfoRef = new Ref<GithubInfo>();
  final Ref<IOException> exceptionRef = new Ref<IOException>();
  ProgressManager.getInstance().run(new Task.Modal(project, "Access to GitHub", true) {
    public void run(@NotNull ProgressIndicator indicator) {
      try {
        final Ref<GithubRepoDetailed> reposRef = new Ref<GithubRepoDetailed>();
        final GithubAuthData auth = GithubUtil.runAndGetValidAuth(project, indicator, new ThrowableConsumer<GithubAuthData, IOException>() {
          @Override
          public void consume(GithubAuthData authData) throws IOException {
            reposRef.set(GithubApiUtil.getDetailedRepoInfo(authData, userAndRepo.getUser(), userAndRepo.getRepository()));
          }
        });
        githubInfoRef.set(new GithubInfo(auth, reposRef.get()));
      }
      catch (IOException e) {
        exceptionRef.set(e);
      }
    }
  });
  if (!exceptionRef.isNull()) {
    if (exceptionRef.get() instanceof GithubAuthenticationCanceledException) {
      return null;
    }
    GithubNotifications.showErrorDialog(project, CANNOT_CREATE_PULL_REQUEST, exceptionRef.get());
    return null;
  }
  return githubInfoRef.get();
}
/**
 * Writes the given collection to the output using the given procedure to write each element.
 * Should be coupled with {@link #readSeq}
 */
public static <T> void writeSeq(@Nonnull DataOutput out, @Nonnull Collection<T> collection, @Nonnull ThrowableConsumer<T, IOException> writeElement)
  throws IOException {
  writeINT(out, collection.size());
  for (T t : collection) {
    writeElement.consume(t);
  }
}
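// A small usage sketch for writeSeq: persisting a collection of strings, with writeSeq emitting the size and the
// supplied ThrowableConsumer emitting each element. The saveNames method name is an assumption for illustration;
// writeUTF is standard java.io.DataOutput.
public static void saveNames(@Nonnull final DataOutput out, @Nonnull Collection<String> names) throws IOException {
  writeSeq(out, names, new ThrowableConsumer<String, IOException>() {
    @Override
    public void consume(String name) throws IOException {
      out.writeUTF(name); // one element per call, in iteration order
    }
  });
}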
private void runInBackground(@Nonnull ThrowableConsumer<ProgressIndicator, VcsException> task) {
  Task.Backgroundable backgroundable = new Task.Backgroundable(myProject, "Loading History...", false) {
    @Override
    public void run(@Nonnull ProgressIndicator indicator) {
      indicator.setIndeterminate(true);
      try {
        task.consume(indicator);
      }
      catch (VcsException e) {
        throw new RuntimeException(e); // TODO
      }
    }
  };

  myDataLoaderQueue.run(backgroundable, null, myRefresher.getProgress().createProgressIndicator());
}
public SequentialLimitedLifoExecutor(Disposable parentDisposable, int maxTasks,
                                     @Nonnull ThrowableConsumer<Task, ? extends Throwable> loadProcess) {
  myMaxTasks = maxTasks;
  myLoadProcess = loadProcess;
  myLoader = new QueueProcessor<>(new DetailsLoadingTask());
  Disposer.register(parentDisposable, this);
}
/**
 * Executes the consumer for each chunk of arguments, checking for cancellation between chunks.
 *
 * @param arguments the arguments to chunk
 * @param groupSize size of argument groups that should be put in the same chunk (like a name and a value)
 * @param consumer  consumer to feed each chunk
 * @throws VcsException if the consumer fails for a chunk
 */
public static void foreachChunk(@Nonnull List<String> arguments, int groupSize, @Nonnull ThrowableConsumer<List<String>, VcsException> consumer)
  throws VcsException {
  List<List<String>> chunks = chunkArguments(arguments, groupSize);

  for (List<String> chunk : chunks) {
    ProgressIndicator indicator = ProgressManager.getInstance().getProgressIndicator();
    if (indicator != null) indicator.checkCanceled();

    consumer.consume(chunk);
  }
}
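// A minimal usage sketch of foreachChunk: passing many "path revision" argument pairs to a VCS command without
// exceeding command-line limits. The runCommand(...) helper is an assumption for illustration; groupSize 2 keeps
// each path/revision pair inside the same chunk.
public static void annotatePairs(@Nonnull List<String> pathRevisionPairs) throws VcsException {
  foreachChunk(pathRevisionPairs, 2, new ThrowableConsumer<List<String>, VcsException>() {
    @Override
    public void consume(List<String> chunk) throws VcsException {
      runCommand(chunk); // hypothetical: execute the command once per chunk of arguments
    }
  });
}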