@Override
public void handle(ActionEvent event) {
    Optional<Pair<String, String>> opt = new MooConnectDialog().showAndWait();
    if(!opt.isPresent()) return;
    Pair<String, String> pair = opt.get();
    String host = pair.getKey();
    String port = pair.getValue();

    // check values
    if(!Validation.IP.matches(host)) {
        new Alert(Alert.AlertType.ERROR, "The host needs to be a valid IP format!").showAndWait();
        return;
    }
    if(!Validation.INTEGER.matches(port)) {
        new Alert(Alert.AlertType.ERROR, "The port needs to be a valid integer format!").showAndWait();
        return;
    }

    if(Moo.getInstance().isConnected()) return;
    Moo.getInstance().connect("manager", ClientType.INTERFACE, host, Integer.parseInt(port));
}
private void banPlayerAction(ActionEvent e) {
    BanDialog dial = new BanDialog(player.getName(), "No reason indicated", 0);
    dial.setTitle("Ban player");
    dial.setHeaderText("Do you really want to ban " + player.getName() + " ?");
    dial.setContentText("Reason ");

    Optional<Pair<String, Integer>> result = dial.showAndWait();
    if (result.isPresent()) {
        Pair<String, Integer> defaultValues = new Pair<String, Integer>("No reason indicated", 0);
        try {
            server.banPlayer(player, result.orElse(defaultValues).getValue() + "d", result.orElse(defaultValues).getKey());
        } catch (RCONServerException e1) {
            server.logError(e1);
        }
    }
}
@Override
public String headers(IXInfoDTO xInfo, Pair... additionalFieldNames) {
    StringBuilder buffer = new StringBuilder();
    if (xInfo != null) {
        Field[] fields = xInfo.getClass().getDeclaredFields();

        // write the additional field names first
        for (Pair additionalFieldName : additionalFieldNames) {
            buffer.append(additionalFieldName.getKey().toString()).append(SEPERATOR);
        }

        for (Field field : fields) {
            // TODO: is setting the field accessible here a good idea? That effectively turns a
            // private member public and never switches it back.
            field.setAccessible(true);
            buffer.append(field.getName());
            buffer.append(SEPERATOR);
        }

        if (xInfo instanceof ISegmentXInfoDTO) {
            buffer.append("direction_tow");
            buffer.append(SEPERATOR);
        }

        buffer.deleteCharAt(buffer.lastIndexOf(SEPERATOR + ""));
    }
    buffer.append(System.getProperty("line.separator"));
    return buffer.toString();
}
/**
 * Sets up the {@link ComboBox} listing all node color scheme choices.
 */
private void setUpNodeColorSchemeComboBox() {
    nodeColorScheme.setConverter(new StringConverter<Pair<String, ColorScheme>>() {
        @Override
        public String toString(final Pair<String, ColorScheme> object) {
            return object.getKey();
        }

        @Override
        public Pair<String, ColorScheme> fromString(final String string) {
            return NODE_COLOR_SCHEMES.stream()
                    .filter(pair -> pair.getKey().equals(string))
                    .collect(Collectors.toList()).get(0);
        }
    });

    nodeColorScheme.getItems().addAll(NODE_COLOR_SCHEMES);
    nodeColorScheme.getSelectionModel().selectFirst();
}
static void LoadPerformanceResults(String fileName, HashMap<Short, Pair<Short, Long>> perfResultsSubpartsRaw) throws FileNotFoundException, IOException {
    BufferedReader br = new BufferedReader(new FileReader(fileName));
    String strLine;
    while ((strLine = br.readLine()) != null) {
        if (strLine.contains("trapID,")) {
            // skip header line
        } else {
            String[] cols = strLine.split(",");
            Short perfID = Short.parseShort(cols[0].trim());
            Short prevPerfID = Short.parseShort(cols[1].trim());
            Long elapsed = Long.parseLong(cols[2].trim());
            perfResultsSubpartsRaw.put(perfID, new Pair(prevPerfID, elapsed));
        }
    }
    br.close();
}
@Override
public List<Servicio> buscarServiciosByTipoServicio(Servicio.TiposServicio tipoServicio) {
    FachadaIntegracion integra = FachadaIntegracion.newInstance(Servicio.class);
    integra.begin();
    List<Servicio> lista = integra.ejecutarNamedQuery("Servicio.buscarPorTipoServicio",
            Arrays.asList(new Pair<>("tipoServicio", tipoServicio)));
    integra.commit();

    // The following loop is ugly code, but LAZY loading does not work properly and EAGER loading
    // produces an infinite loop
    for(Servicio serv : lista) {
        serv.setProveedor(null);
        serv.setEventoServicios(null);
    }
    return lista;
}
@Override
public List<Servicio> buscarServiciosEntreFechas(ArrayList<String> fechas) {
    // dd-MM-yyyy: uppercase MM is month-of-year (lowercase mm would be minutes)
    DateFormat format = new SimpleDateFormat("dd-MM-yyyy");
    ArrayList<Pair> fechasNuevas = new ArrayList<>();
    try {
        Date fechaIni = format.parse(fechas.get(0));
        Date fechaFin = format.parse(fechas.get(1));
        fechasNuevas.add(new Pair("fechaIni", fechaIni));
        fechasNuevas.add(new Pair("fechaFin", fechaFin));
    } catch (ParseException e) {
        e.printStackTrace();
    }

    FachadaIntegracion fachadaIntegracion = FachadaIntegracion.newInstance(Servicio.class);
    fachadaIntegracion.begin();
    // pass the parameter pairs directly; wrapping them in another list would hand the
    // named query a List<List<Pair>> instead of a List<Pair>
    List servicios = fachadaIntegracion.ejecutarNamedQuery("Servicio.buscarEntreFechas", fechasNuevas);
    fachadaIntegracion.commit();
    return servicios;
}
public Pair<SparseBitSet, SparseBitSet> Get(State forState, int depth, Stack<GraphNode> jumpStack, SparseBitSet shiftedBitMask) {
    Map<String, Pair<SparseBitSet, SparseBitSet>> forStateCache = cache.get(forState);
    if (forStateCache == null) return null;

    String key = keyForJumpStack(jumpStack);
    Pair<SparseBitSet, SparseBitSet> cachedUnshiftedMask2UnshiftedRez = forStateCache.get(key);
    if (cachedUnshiftedMask2UnshiftedRez == null) return null;

    // check if prevStartingPositions is contained in the prevPoses from cache
    SparseBitSet unshiftedBitMask = shiftedBitMask.clone();
    ShiftOnSentences(unshiftedBitMask, input, depth);

    // B contained by A  =  |A and B| == |B|
    unshiftedBitMask.and(cachedUnshiftedMask2UnshiftedRez.getKey());
    if (unshiftedBitMask.cardinality() == 0)
        return null; // the cached bitmask and the requested bitmask don't intersect at all - return null

    SparseBitSet shiftedBitMaskIntersection = unshiftedBitMask.clone();
    ShiftOnSentences(shiftedBitMaskIntersection, input, -depth);

    SparseBitSet shiftedRezIntersection = cachedUnshiftedMask2UnshiftedRez.getValue().clone();
    ShiftOnSentences(shiftedRezIntersection, input, -depth);
    shiftedRezIntersection.and(shiftedBitMaskIntersection);

    return new Pair<SparseBitSet, SparseBitSet>(shiftedBitMaskIntersection, shiftedRezIntersection);
}
static void saveLatexPerfLog(ArrayList<Pair<String, Long>> results) {
    try {
        // Save performance results also as latex
        String logFileName = String.format("MPC_PERF_log_%d.tex", System.currentTimeMillis());
        FileOutputStream perfFile = new FileOutputStream(logFileName);
        String tableHeader = "\\begin{tabular}{|l|c|}\n"
                + "\\hline\n"
                + "\\textbf{Operation} & \\textbf{Time (ms)} \\\\\n"
                + "\\hline\n"
                + "\\hline\n";
        perfFile.write(tableHeader.getBytes());
        for (Pair<String, Long> measurement : results) {
            String operation = measurement.getKey();
            operation = operation.replace("_", "\\_");
            perfFile.write(String.format("%s & %d \\\\ \\hline\n", operation, measurement.getValue()).getBytes());
        }
        String tableFooter = "\\hline\n\\end{tabular}";
        perfFile.write(tableFooter.getBytes());
        perfFile.close();
    } catch (IOException ex) {
        Logger.getLogger(MPCTestClient.class.getName()).log(Level.SEVERE, null, ex);
    }
}
static void InsertPerfInfoIntoFiles(String basePath, String cardName, String experimentID, HashMap<Short, Pair<Short, Long>> perfResultsSubpartsRaw) throws FileNotFoundException, IOException {
    File dir = new File(basePath);
    String[] filesArray = dir.list();
    if ((filesArray != null) && dir.isDirectory()) {
        // make subdir for results
        String outputDir = String.format("%s\\perf\\%s\\", basePath, experimentID);
        new File(outputDir).mkdirs();

        for (String fileName : filesArray) {
            File dir2 = new File(basePath + fileName);
            if (!dir2.isDirectory()) {
                InsertPerfInfoIntoFile(String.format("%s\\%s", basePath, fileName), cardName, experimentID, outputDir, perfResultsSubpartsRaw);
            }
        }
    }
}
@Override
public void internalProcess() {
    try {
        while (isRunning && !Thread.currentThread().isInterrupted()) {
            Pair<byte[], Long> toRead;
            toRead = byteQueue.poll(5, TimeUnit.SECONDS);
            if (toRead == null) {
                continue;
            }
            byte[] bytesToRead = toRead.getKey();
            if (bytesToRead.length == 0) {
                isRunning = false;
                return;
            }
            matchThings(bytesToRead, toRead.getValue());
        }
    } catch (InterruptedException e) {
        Logging.log(e);
        return;
    }
}
public StreamDataset(String name, StreamingDataSource dataSource, int memoryCapacity) {
    this.uniqueName = name;
    this.stream = dataSource;
    indexes = new ArrayList<>();
    indexesActivators = new ArrayList<>();
    preprocessor = stream.getPreprocessor();

    if(memoryCapacity > 0)
        streamData = new ArrayBigDataset(memoryCapacity);
    else
        streamData = new ArrayBigDataset();

    List<List<Microblog>> tempLst = new ArrayList<>();
    tempRecoveryDataList = Collections.synchronizedList(tempLst);

    // if existing stream, load recoveryBlkCounter and latest id
    Pair<String, String> dirFileidPair = getRecoveryFileIdPrefix();
    Pair<Integer, Long> existingIds = KiteUtils.loadStreamIds(dirFileidPair, stream.getScheme());
    recoveryBlkCounter = existingIds.getKey();
    currId = new IdGenerator(existingIds.getValue() + 1);
}
/**
 * Gets the duration and time unit from a string (like 1d or 5h)
 *
 * @param str The string
 * @return The pair with both values
 */
public static Pair<Integer, TimeUnit> getTime(String str) {
    if(!Validation.TIME.matches(str)) return null;
    String timeUnitStr = str.substring(str.length() - 1, str.length());
    int duration = Integer.parseInt(str.substring(0, str.length() - 1));

    TimeUnit unit = null;
    switch(timeUnitStr) {
        case "d": unit = TimeUnit.DAYS; break;
        case "h": unit = TimeUnit.HOURS; break;
        case "m": unit = TimeUnit.MINUTES; break;
        case "s": unit = TimeUnit.SECONDS; break;
    }
    return new Pair<>(duration, unit);
}
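// A minimal usage sketch (not from the original source): assuming Validation.TIME accepts a
// number followed by d/h/m/s, a caller can turn the returned pair into milliseconds like this.
// The variable names below are illustrative only.
Pair<Integer, TimeUnit> parsed = getTime("5h");
if (parsed != null) {
    long millis = parsed.getValue().toMillis(parsed.getKey()); // 5 hours -> 18_000_000 ms
}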
@Test
// Filtered results are spread over the dataset, located neither directly at the beginning nor at the end of the dataset
// Only a small pre-result list, e.g. 2 entries
public void testFilterSmallInputInMidBigMargin_GLUCOSE_BG() throws ParseException {
    FilterResult result;
    FilterResult expectedResult;
    List<Pair<Date, Date>> expectedTimeSeries = new ArrayList<>();
    List<VaultEntry> vaultEntries = new ArrayList<>(dataSet);
    List<VaultEntryAnnotation> tmpAnnotations;

    setUpFilterUnderTest(new EventFilter(VaultEntryType.GLUCOSE_BG), 4 * 60);
    result = filterUnderTest.filter(dataSet);

    vaultEntries.remove(vaultEntries.size() - 1);
    vaultEntries.remove(vaultEntries.size() - 1);
    expectedTimeSeries.add(new Pair(TimestampUtils.createCleanTimestamp("2017.06.29-04:46", "yyyy.MM.dd-HH:mm"),
            TimestampUtils.createCleanTimestamp("2017.06.29-12:31", "yyyy.MM.dd-HH:mm")));
    expectedResult = new FilterResult(vaultEntries, expectedTimeSeries);

    assertEquals(expectedResult.filteredData, result.filteredData);
    assertEquals(expectedResult.timeSeries, result.timeSeries);
}
@Test
public void testDateTimePointFilter_margin_10() throws ParseException {
    Date dateTimePoint = TestFunctions.creatNewDateToCheckFor("2017.06.29-12:15");
    int marginInMinutes = 10; // 2017.06.29-12:05 - 2017.06.29-12:25
    instance = new DateTimePointFilter(dateTimePoint, marginInMinutes);
    FilterResult result = instance.filter(data);

    List<VaultEntry> filteredData = new ArrayList<>();
    filteredData.add(new VaultEntry(VaultEntryType.STRESS, TestFunctions.creatNewDateToCheckFor("2017.06.29-12:11"), 17.25));
    filteredData.add(new VaultEntry(VaultEntryType.HEART_RATE_VARIABILITY, TestFunctions.creatNewDateToCheckFor("2017.06.29-12:11"), 41.0, 131.0));
    filteredData.add(new VaultEntry(VaultEntryType.HEART_RATE, TestFunctions.creatNewDateToCheckFor("2017.06.29-12:21"), 51.0));
    filteredData.add(new VaultEntry(VaultEntryType.HEART_RATE_VARIABILITY, TestFunctions.creatNewDateToCheckFor("2017.06.29-12:21"), 44.0, 127.0));
    filteredData.add(new VaultEntry(VaultEntryType.STRESS, TestFunctions.creatNewDateToCheckFor("2017.06.29-12:21"), 18.25));

    List<Pair<Date, Date>> timeSeries = new ArrayList<>();
    timeSeries.add(new Pair<>(TestFunctions.creatNewDateToCheckFor("2017.06.29-12:11"),
            TestFunctions.creatNewDateToCheckFor("2017.06.29-12:21")));

    FilterResult checkForThisResult = new FilterResult(filteredData, timeSeries);

    assertEquals(result.filteredData, checkForThisResult.filteredData);
    assertEquals(result.timeSeries, checkForThisResult.timeSeries);
    //assertEquals(result, checkForThisResult);
}
private Dialog<Pair<String, String>> getDialog() {
    if (dialog == null) {
        ResourceBundle i18n = toolBox.getI18nBundle();
        MaterialIconFactory iconFactory = MaterialIconFactory.get();
        Text icon = iconFactory.createIcon(MaterialIcon.NATURE_PEOPLE, "30px");

        dialog = new Dialog<>();
        dialog.setTitle(i18n.getString("buttons.sample.dialog.title"));
        dialog.setHeaderText(i18n.getString("buttons.sample.dialog.header"));
        dialog.setContentText(i18n.getString("buttons.sample.dialog.content"));
        dialog.setGraphic(icon);
        dialog.getDialogPane().getButtonTypes().addAll(ButtonType.OK, ButtonType.CANCEL);
        dialog.getDialogPane().setContent(getDialogPane());
        dialog.getDialogPane().getStylesheets().addAll(toolBox.getStylesheets());
        dialog.setResultConverter(dialogButton -> {
            if (dialogButton == ButtonType.OK) {
                return new Pair<>(lastSelectedSampler, textField.getText());
            }
            // HACK to track the selected sampler after the combo box is hidden: misbehaving Filter
            lastSelectedSampler = null;
            return null;
        });
    }
    return dialog;
}
@Test
public void testNormalizeTimeSeriesZeroMargin() throws ParseException {
    List<Pair<Date, Date>> timeSeries = new ArrayList<>();
    List<Pair<Date, Date>> expectedResult = new ArrayList<>();

    timeSeries.add(new Pair(TimestampUtils.createCleanTimestamp("2017.06.29-04:53", "yyyy.MM.dd-HH:mm"),
            TimestampUtils.createCleanTimestamp("2017.06.29-04:56", "yyyy.MM.dd-HH:mm")));
    timeSeries.add(new Pair(TimestampUtils.createCleanTimestamp("2017.06.29-04:56", "yyyy.MM.dd-HH:mm"),
            TimestampUtils.createCleanTimestamp("2017.06.29-04:58", "yyyy.MM.dd-HH:mm")));
    expectedResult.add(new Pair(TimestampUtils.createCleanTimestamp("2017.06.29-04:53", "yyyy.MM.dd-HH:mm"),
            TimestampUtils.createCleanTimestamp("2017.06.29-04:58", "yyyy.MM.dd-HH:mm")));

    // no margin, overlapping of entries
    assertEquals(expectedResult, TimestampUtils.normalizeTimeSeries(timeSeries, 0));

    timeSeries = new ArrayList<>();
    timeSeries.add(new Pair(TimestampUtils.createCleanTimestamp("2017.06.29-04:53", "yyyy.MM.dd-HH:mm"),
            TimestampUtils.createCleanTimestamp("2017.06.29-04:56", "yyyy.MM.dd-HH:mm")));
    timeSeries.add(new Pair(TimestampUtils.createCleanTimestamp("2017.06.29-04:57", "yyyy.MM.dd-HH:mm"),
            TimestampUtils.createCleanTimestamp("2017.06.29-04:58", "yyyy.MM.dd-HH:mm")));

    // no margin, no overlapping of entries
    assertEquals(expectedResult, TimestampUtils.normalizeTimeSeries(timeSeries, 0));
}
private static Pair<Pair<Boolean, String>, MQLResults> executeQuery(QueryMetadataEntry queryMetadata) {
    StreamDataset stream = KiteInstance.getStream(queryMetadata.getStreamName());

    boolean successfulExecution = true;
    String errorMessage = "";
    MQLResults results = null;

    if(stream != null) {
        results = stream.search(queryMetadata.getQuery(), queryMetadata.getAttributeNames());
        if(results == null) {
            successfulExecution = false;
            errorMessage = "Query is not successfully executed!";
        }
    } else {
        successfulExecution = false;
        errorMessage = "Administration error. Stream " + queryMetadata.getStreamName()
                + " is not active on this node.";
    }

    return new Pair<>(new Pair<>(successfulExecution, errorMessage), results);
}
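// A hypothetical caller (not part of the original source) unpacking the nested Pair returned
// above; the flag/message/result layout follows directly from the return statement.
Pair<Pair<Boolean, String>, MQLResults> outcome = executeQuery(queryMetadata);
boolean ok = outcome.getKey().getKey();
String errorMessage = outcome.getKey().getValue();
MQLResults results = outcome.getValue();
if (!ok) {
    System.err.println(errorMessage); // results is null in this case
}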
/**
 * Once the host has collected all the shares for the same j, it can use Algorithm 4.3 on all
 * the σ_{i,j} to recover σ_j, obtaining the aggregate signature (σ_j, ϵ_j). The recipient of
 * (m, j), σ, ϵ can verify the validity of the signature by checking whether
 * ϵ = Hash(R || Hash(m) || j), where R = σ·G + ϵ·Y.
 *
 * @param msgToSign
 * @param counter
 * @param playersList
 * @param perfResultsList
 * @param perfFile
 * @param runCfg
 * @throws NoSuchAlgorithmException
 * @throws Exception
 */
static void PerformSignature(BigInteger msgToSign, int counter, ArrayList<MPCPlayer> playersList,
        ArrayList<Pair<String, Long>> perfResultsList, FileOutputStream perfFile, MPCRunConfig runCfg)
        throws NoSuchAlgorithmException, Exception {
    // Sign EC Point
    byte[] plaintext_sig = mpcGlobals.G.multiply(msgToSign).getEncoded(false);

    if (!playersList.isEmpty()) {
        BigInteger sum_s_BI = new BigInteger("0");
        BigInteger card_e_BI = new BigInteger("0");
        boolean bFirstPlayer = true;
        for (MPCPlayer player : playersList) {
            if (bFirstPlayer) {
                sum_s_BI = player.Sign(QUORUM_INDEX, counter, mpcGlobals.Rands[counter - 1].getEncoded(false), plaintext_sig);
                card_e_BI = player.GetE(QUORUM_INDEX);
                bFirstPlayer = false;
            } else {
                sum_s_BI = sum_s_BI.add(player.Sign(QUORUM_INDEX, counter, mpcGlobals.Rands[counter - 1].getEncoded(false), plaintext_sig));
                sum_s_BI = sum_s_BI.mod(mpcGlobals.n);
            }
        }
        System.out.println(String.format("Sign: %s", Util.bytesToHex(sum_s_BI.toByteArray())));
    }
}
private static Pair<Integer, Integer> serializeMemoryValue(ArrayList<Long> idList, StreamDataset stream, ByteStream recordBytes, int startInd) {
    int bytesLen = 0;
    for(int i = startInd; i < idList.size(); ++i) {
        Long mid = idList.get(i);
        Microblog microblog = stream.getRecord(mid);
        // null pointer exception here, mostly during index flushing; looks like microblog is null
        // for some reason (may be related to discard method)
        bytesLen += microblog.serialize(recordBytes, stream.getScheme());
        stream.decrementIndexesCount(microblog.getId());
        if(bytesLen >= ConstantsAndDefaults.FILE_BLK_SIZE_BYTES) {
            if((i + 1) < idList.size())
                return new Pair<Integer, Integer>(bytesLen, i + 1);
        }
    }
    return new Pair<Integer, Integer>(bytesLen, -1);
}
/**
 * Constructor that allows specifying whether there is a wave effect and splash particles.
 * @param waves Specifies whether the object will have waves
 * @param splashParticles Specifies whether the object will have splash particles
 */
public Water(boolean waves, boolean splashParticles) {
    this.waves = waves;
    this.splashParticles = splashParticles;
    this.fixturePairs = new HashSet<Pair<Fixture, Fixture>>();
    this.setDebugMode(false);

    if (waves) {
        textureWater = new TextureRegion(new Texture(Gdx.files.internal("water.png")));
        polyBatch = new PolygonSpriteBatch();
    }
    if (splashParticles) {
        textureDrop = new Texture(Gdx.files.internal("drop.png"));
        spriteBatch = new SpriteBatch();
        particles = new ArrayList<Particle>();
    }

    shapeBatch = new ShapeRenderer();
    shapeBatch.setColor(0, 0.5f, 1, 1);
}
private List<String> generateTranslatedGamesNames(List<GameSpec> games, Configuration config) {
    final String language = config.getLanguage();
    final Multilinguism multilinguism = Multilinguism.getSingleton();

    return games.stream()
            .map(gameSpec -> new Pair<>(gameSpec, multilinguism.getTrad(gameSpec.getNameCode(), language)))
            .map(pair -> {
                String variationHint = pair.getKey().getVariationHint();
                if (variationHint == null) {
                    return pair.getValue();
                }
                return pair.getValue() + " " + variationHint;
            }).collect(Collectors.toList());
}
public void registerHandler() {
    Optional<Pair<String, String>> result = signInView.showRegisterDialog();
    result.ifPresent(usernamePassword -> {
        try {
            apiDriver.register(usernamePassword.getKey(), usernamePassword.getValue());
        } catch (ServerUnreachableException | UserAlreadyExistsException e) {
            ketchupDesktopView.showErrorDialog("Error", e.getMessage());
        }
    });
}
@Override
public void post(Object event) {
    Class<?> eventClass = event.getClass();
    while(eventClass != Object.class) {
        for(Listener.Priority p : Listener.Priority.getInvokeOrder()) {
            for(Pair<Object, EventHandler> h : getHandlers(eventClass, p)) {
                EventHandler handler = h.getValue();
                handler.handle(event);
            }
        }
        eventClass = eventClass.getSuperclass();
    }
}
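// Illustration (not from the original source): because post(...) walks up the class hierarchy
// until it reaches Object, a handler registered for a base event type also receives instances
// of its subclasses. The event types and the bus variable below are hypothetical.
class BaseEvent {}
class ChatEvent extends BaseEvent {}

// Handlers registered for ChatEvent fire first, then handlers registered for BaseEvent,
// each group in the priority order given by Listener.Priority.getInvokeOrder().
bus.post(new ChatEvent());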
public static List<Pair<String, Attribute>> getStandardFeatureSet(Map<String, Set<Object>> dataDomain) {
    BaseFeaturesFactory factory = new BaseFeaturesFactory(dataDomain);
    List<Pair<String, Attribute>> featureVector = new ArrayList<>();

    featureVector.add(new Pair(Fields.GOLDEN.name(), factory.getGolden()));
    featureVector.add(new Pair(Fields.UNIT_STATE.name(), factory.getUnitState()));
    featureVector.add(new Pair(Fields.TRUSTED_JUDGEMENTS.name(), factory.getTrustedJudgements()));
//    featureVector.add(new Pair(Fields.LAST_JUDGEMENT_TIME.name(), factory.getLastJudgementTime()));
    featureVector.add(new Pair(Fields.GENDER_CONFIDENCE.name(), factory.getGenderConfidence()));
    featureVector.add(new Pair(Fields.PROFILE_EXISTS.name(), factory.getProfileExists()));
    featureVector.add(new Pair(Fields.PROFILE_EXISTS_CONFIDENCE.name(), factory.getProfileExistsConfidence()));
//    featureVector.add(new Pair(Fields.PROFILE_CREATION_DATE.name(), factory.getProfileCreationDate()));
//    featureVector.add(new Pair(Fields.PROFILE_DESCRIPTION.name(), factory.getDescription()));
    featureVector.add(new Pair(Fields.FAVORITES_NUMBER.name(), factory.getFavoritesNumber()));
    featureVector.add(new Pair(Fields.GENDER_GOLDEN.name(), factory.getGenderGold()));
    featureVector.add(new Pair(Fields.LINK_COLOR.name(), factory.getLinkColor()));
//    featureVector.add(new Pair(Fields.USERNAME.name(), factory.getUserName()));
//    featureVector.add(new Pair(Fields.RETWEET_COUNT.name(), factory.getRetweetsCount()));
    featureVector.add(new Pair(Fields.SIDEBAR_COLOR.name(), factory.getSidebarColor()));
    featureVector.add(new Pair(Fields.TWEETS_COUNT.name(), factory.getTweetsCount()));
//    featureVector.add(new Pair(Fields.TWEET_LOCATION.name(), factory.getTweetLocation()));
//    featureVector.add(new Pair(Fields.USER_TIMEZONE.name(), factory.getUserTimezone()));

    // Class attribute should be last
    featureVector.add(new Pair(Fields.GENDER.name(), factory.getGender()));
    return featureVector;
}
TreeDrawerNode(TreeNode treeNode, TreeDrawerNode parentDrawer) {
    Pair<Integer, Integer> trialCoords = new Pair<>(treeNode.getDepth(), parentDrawer.getY());
    while (nodeMap.containsKey(trialCoords)) {
        trialCoords = new Pair<>(trialCoords.getKey(), trialCoords.getValue() + 1);
    }
    setup(treeNode, trialCoords.getKey(), trialCoords.getValue());
    parent = parentDrawer;
}
private Pair<Node, ComboBox<String>> createDisplayOptionsRow() {
    BorderPane rowDisplay = new BorderPane();
    Label label = new Label("Display values as: ");
    rowDisplay.setLeft(label);
    ComboBox<String> dropdown = createDisplayOptionsDropdown();
    dropdown.setPrefWidth(Integer.MAX_VALUE);
    rowDisplay.setCenter(dropdown);
    return new Pair<>(rowDisplay, dropdown);
}
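// A possible caller (illustrative only; container and refreshDisplay are hypothetical names):
// the Pair hands back both the assembled row for layout and the ComboBox for wiring listeners.
Pair<Node, ComboBox<String>> row = createDisplayOptionsRow();
container.getChildren().add(row.getKey());
row.getValue().valueProperty().addListener((obs, oldValue, newValue) -> refreshDisplay(newValue));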
private void appendPreprocessedInstruction(String instruction, int lineNumber, boolean newLine) {
    lineNumAndAsmFileMap.put(mapperIndex, new Pair<>(lineNumber, asmIndex));
    firstPassString.append(instruction);
    if (newLine) {
        mapperIndex++;
        firstPassString.append("\n");
    }
//    System.out.println(lineNumber + ": " + instruction);
}
static void SavePerformanceResults(HashMap<Short, Pair<Short, Long>> perfResultsSubpartsRaw, String fileName) throws FileNotFoundException, IOException {
    // Save performance traps into single file
    FileOutputStream perfLog = new FileOutputStream(fileName);
    String output = "perfID, previous perfID, time difference between perfID and previous perfID (ms)\n";
    perfLog.write(output.getBytes());

    for (Short perfID : perfResultsSubpartsRaw.keySet()) {
        output = String.format("%d, %d, %d\n", perfID,
                perfResultsSubpartsRaw.get(perfID).getKey(),
                perfResultsSubpartsRaw.get(perfID).getValue());
        perfLog.write(output.getBytes());
    }
    perfLog.close();
}
@Override
public void initialize(final URL location, final ResourceBundle resources) {
    CanvasPresentation.mouseTracker.registerOnMousePressedEventHandler(event -> {
        // Deselect all elements
        SelectHelper.clearSelectedElements();
    });

    activeComponent.addListener((obs, oldComponent, newComponent) -> {
        if (oldComponent != null) {
            componentTranslateMap.put(oldComponent, new Pair<>(root.getTranslateX(), root.getTranslateY()));
        }

        root.getChildren().removeIf(node -> node instanceof ComponentPresentation);

        if (newComponent == null) return; // We should not add the new component since it is null (clear the view)

        if (componentTranslateMap.containsKey(newComponent)) {
            final Pair<Double, Double> restoreCoordinates = componentTranslateMap.get(newComponent);
            root.setTranslateX(restoreCoordinates.getKey());
            root.setTranslateY(restoreCoordinates.getValue());
        } else {
            root.setTranslateX(GRID_SIZE * 3);
            root.setTranslateY(GRID_SIZE * 8);
        }

        final ComponentPresentation newComponentPresentation = new ComponentPresentation(newComponent);
        root.getChildren().add(newComponentPresentation);
        root.requestFocus();
    });

    leaveTextAreas = () -> { root.requestFocus(); };

    leaveOnEnterPressed = (keyEvent) -> {
        if (keyEvent.getCode().equals(KeyCode.ENTER) || keyEvent.getCode().equals(KeyCode.ESCAPE)) {
            leaveTextAreas();
        }
    };
}
/**
 * Unregisters a listener
 *
 * @param listener The listener to unregister
 */
public void unregister(Object listener) {
    if(listener == null) throw new NullPointerException("listener");
    for(EnumMap<Listener.Priority, List<Pair<Object, EventHandler>>> map : handlers.values()) {
        List<Pair<Object, EventHandler>> list = map.get(Listener.Priority.MEDIUM);
        List<Pair<Object, EventHandler>> toRemove = new ArrayList<>();
        for(Pair<Object, EventHandler> pair : list) {
            if(pair.getKey() == listener) toRemove.add(pair);
        }
        list.removeAll(toRemove);
    }
}
public void decrementIndexCount(long id) {
    Pair<Integer, Integer> locator = locateRecord(id);
    int chunkId = locator.getKey();
    int chunkInd = locator.getValue();

    if(chunkId < 0) {
        if(currIndexesCount[chunkInd] != null)
            currIndexesCount[chunkInd]--;
    } else {
        if(indexesCount.get(chunkId)[chunkInd] != null)
            indexesCount.get(chunkId)[chunkInd]--;
    }
}
@Test
void testAddColumn() {
    final Pair<String, ColumnType> column = new Pair<>("test_key", ColumnType.INTEGER);
    fileDatabaseTable.addColumn(column.getKey(), column.getValue());

    assertThat(fileDatabaseTable.getColumns().size()).isEqualTo(1);
    assertThat(fileDatabaseTable.getColumns()).contains(column);
}
@RequestMapping(value = "/anadir-servicios-evento/{idEvento}", method = RequestMethod.POST, produces = "application/json") public @ResponseBody ResponseEntity<String> añadirServiciosAEvento(Model model, @PathVariable Long idEvento, @RequestParam(value="servicios[]") String[] servicios) { List<Long> listaIDs = new ArrayList<>(); Contexto contex = null; HttpStatus estatus = HttpStatus.BAD_REQUEST; Arrays.stream(servicios) .mapToLong((str)-> Long.parseLong(str)) .forEach((str)-> listaIDs.add(Long.parseLong(String.valueOf(str)))); if(idEvento<=0) { model.addAttribute("mensajeError", "ID de evento erroneo"); }else{ contex = FactoriaComandos.getInstance().crearComando(ANADIR_SERVICIOS_A_EVENTO).execute(new Pair<>(idEvento, listaIDs)); if(contex.getEvento() == ANADIR_SERVICIOS_A_EVENTO){ model.addAttribute("mensaje", "Añadidos correctamente!"); estatus = HttpStatus.OK; }else if(contex.getEvento() == ERROR_AÑADIR_SERVICIOS_A_EVENTO){ model.addAttribute("mensajeError", "Error al añadir los servicios al evento."); }else if(contex.getEvento() == SERVICIO_CON_FECHAS_OCUPADAS){ model.addAttribute("mensajeError", "Servicio con las fechas ocupadas para ese dia"); }else if(contex.getEvento() == NO_SE_PUDO_AÑADIR_ALGUN_EVENTO){ model.addAttribute("mensajeError", "ERROR. No se pudo agregar algún servicio"); }else if(contex.getEvento() == LISTA_SERVICIOS_VACIA){ model.addAttribute("mensajeError", "Lista de servicios vacía"); }else if(contex.getEvento() == ERROR_BUSCAR_EVENTO){ model.addAttribute("mensajeError", "Error al buscar el evento"); } } return new ResponseEntity<>((contex.getEvento()== ANADIR_SERVICIOS_A_EVENTO)? "Servicios añadidos correctamente" : contex.getEvento().toString(), estatus); }
private static Pair<HashMap<Integer, List<Integer>>, HashMap<Integer, List<Integer>>> PositionsForAnnotationCondition(TransitionCondition condition, IndexedLuceneCorpus input) throws IOException {
    String key = condition.toString();
    if (searchCache.containsKey(input))
        if (searchCache.get(input).positionsForMatchingAnnotations.containsKey(key))
            return searchCache.get(input).positionsForMatchingAnnotations.get(key);

    SparseBitSet ANYforAnnotations = new SparseBitSet();
    ANYforAnnotations.set(0, input.getAnnotationsCount());
    ANYforAnnotations.locked = true;
    SparseBitSet data = PositionsForTokenCondition(condition, input, ANYforAnnotations);

    Pair<HashMap<Integer, List<Integer>>, HashMap<Integer, List<Integer>>> rez =
            new Pair<HashMap<Integer, List<Integer>>, HashMap<Integer, List<Integer>>>(
                    new HashMap<Integer, List<Integer>>(), new HashMap<Integer, List<Integer>>());

    for (int i = data.nextSetBit(); i != -1; i = data.nextSetBit()) {
        Document doc = input.annotationSearcher.doc(i);
        int startOfAnnotation = doc.getField("GGS:StartTokenIndex").numericValue().intValue();
        int endOfAnnotation = doc.getField("GGS:EndTokenIndex").numericValue().intValue();

        List<Integer> allAnnotationsWhichStartHere = rez.getKey().get(startOfAnnotation);
        if (allAnnotationsWhichStartHere == null) {
            allAnnotationsWhichStartHere = new ArrayList<>();
            rez.getKey().put(startOfAnnotation, allAnnotationsWhichStartHere);
        }
        allAnnotationsWhichStartHere.add(i);

        List<Integer> allAnnotationsWhichEndHere = rez.getValue().get(endOfAnnotation);
        if (allAnnotationsWhichEndHere == null) {
            allAnnotationsWhichEndHere = new ArrayList<>();
            rez.getValue().put(endOfAnnotation, allAnnotationsWhichEndHere);
        }
        allAnnotationsWhichEndHere.add(i);
    }

    if (!searchCache.containsKey(input))
        searchCache.put(input, new SearchCache());
    searchCache.get(input).positionsForMatchingAnnotations.put(key, rez);
    return rez;
}
public Usuario buscarUsuarioByEmail(String email) {
    FachadaIntegracion integra = FachadaIntegracion.newInstance(Usuario.class);
    integra.begin();
    List list = integra.ejecutarNamedQuery("Usuario.buscarPorEmail", Arrays.asList(new Pair<>("email", email)));
    integra.commit();
    return (list.isEmpty()) ? null : (Usuario) (list.iterator().next());
}
@Override
public List buscarServiciosByEvento(Evento evento) {
    FachadaIntegracion fachadaIntegracion = FachadaIntegracion.newInstance(Servicio.class);
    fachadaIntegracion.begin();
    List servicios = fachadaIntegracion.ejecutarNamedQuery("Servicio.buscarPorEvento", Arrays.asList(new Pair<>("evento", evento)));
    fachadaIntegracion.commit();
    return servicios;
}
private Pair<Pair<Boolean, List<Microblog>>, Pair<Long, Long>> getLatestKFromBytes(int k, TemporalPeriod time, byte[] bytes) {
    // get latest "k" microblogs, within "time", in "bytes"
    // Assumption 1: microblogs in bytes are ordered by time from older to more recent
    ByteStream byteStream = new ByteStream(bytes);
    List<Microblog> microblogs = new ArrayList<>(k);
    Microblog tmpMicroblog;
    Long firstTimestamp = null, lastTimestamp = null;

    while ((tmpMicroblog = Microblog.deserialize(byteStream, stream.getScheme())) != null) {
        if(time == null || time.overlap(tmpMicroblog.getTimestamp())) {
            // successful search
            microblogs.add(tmpMicroblog);
            if(microblogs.size() > k)
                microblogs.remove(0); // Assumption 1 above
        } else {
            if (firstTimestamp == null)
                firstTimestamp = tmpMicroblog.getTimestamp();
            lastTimestamp = tmpMicroblog.getTimestamp();
        }
    }

    return new Pair<>(new Pair<>(microblogs.size() > 0, microblogs), new Pair<>(firstTimestamp, lastTimestamp));
}
public void saveOrUpdateProductsSupplier(Supplier supplier, ArrayList<Pair<Product, Float>> products, boolean update) {
    for (Pair<Product, Float> p : products) {
        SupplierProposeProduct supplierProposeProduct = new SupplierProposeProduct();
        supplierProposeProduct.setProduct(p.getKey());
        supplierProposeProduct.setSellPrice(p.getValue());
        supplierProposeProduct.setSupplier(supplier);

        if(!update)
            daoProposeProduct.save(supplierProposeProduct);
        else
            daoProposeProduct.update(supplierProposeProduct);
    }
}
@Override
boolean matchesFilterParameters(VaultEntry entry) {
    boolean result = false;
    for (Pair<Date, Date> p : timeSpansForContinuousData) {
        if (TimestampUtils.withinDateTimeSpan(p.getKey(), p.getValue(), entry.getTimestamp())) {
            result = true;
            break;
        }
        if (entry.getTimestamp().before(p.getKey())) {
            // stop early: the entry's timestamp lies before every time span that follows
            break;
        }
    }
    return result;
}