/**
 * Get the currently set up send span
 *
 * @return TimeSpan The send span
 */
private TimeSpan getSendSpan() {
    TimeSpan start = null;
    IntRange range = new IntRange(Math.round(jSendTimeFrame.getMinimumColoredValue()),
            Math.round(jSendTimeFrame.getMaximumColoredValue()));
    if (range.getMinimumInteger() == range.getMaximumInteger() && !jExactTimeButton.isSelected()) {
        return null;
    }
    if (jAlwaysButton.isSelected()) {
        start = new TimeSpan(range);
    } else if (jDayButton.isSelected()) {
        start = new TimeSpan(dateTimeField.getSelectedDate(), range);
    } else if (jExactTimeButton.isSelected()) {
        start = new TimeSpan(dateTimeField.getSelectedDate());
    }
    if (start != null) {
        start.setDirection(TimeSpan.DIRECTION.SEND);
    }
    return start;
}
/**
 * Get the currently set up arrive span
 *
 * @return TimeSpan The arrive span
 */
private TimeSpan getArriveSpan() {
    TimeSpan arrive = null;
    IntRange range = new IntRange(Math.round(jSendTimeFrame.getMinimumColoredValue()),
            Math.round(jSendTimeFrame.getMaximumColoredValue()));
    if (range.getMinimumInteger() == range.getMaximumInteger() && !jExactTimeButton.isSelected()) {
        return null;
    }
    if (jAlwaysButton.isSelected()) {
        arrive = new TimeSpan(range);
    } else if (jDayButton.isSelected()) {
        arrive = new TimeSpan(dateTimeField.getSelectedDate(), range);
    } else if (jExactTimeButton.isSelected()) {
        arrive = new TimeSpan(dateTimeField.getSelectedDate());
    }
    if (arrive != null) {
        arrive.setDirection(TimeSpan.DIRECTION.ARRIVE);
    }
    return arrive;
}
/**
 * Print range of program runtime instructions
 * @param DMLInstMap Mapping between source code line number and corresponding runtime instruction(s)
 * @param range Range of lines of DML code to be displayed
 */
public void printRuntimeInstructions(TreeMap<Integer, ArrayList<Instruction>> DMLInstMap, IntRange range) {
    //Display instructions
    for (int lineNumber = range.getMinimumInteger(); lineNumber <= range.getMaximumInteger(); lineNumber++) {
        if (DMLInstMap.get(lineNumber) != null) {
            for (Instruction currInst : DMLInstMap.get(lineNumber)) {
                if (currInst instanceof CPInstruction) {
                    System.out.format("\t\t id %4d: %s\n", currInst.getInstID(), prepareInstruction(currInst.toString()));
                } else if (currInst instanceof MRJobInstruction) {
                    MRJobInstruction currMRInst = (MRJobInstruction) currInst;
                    System.out.format("\t\t id %4d: %s\n", currInst.getInstID(), prepareInstruction(currMRInst.getMRString(false)));
                } else if (currInst instanceof BreakPointInstruction) {
                    BreakPointInstruction currBPInst = (BreakPointInstruction) currInst;
                    System.out.format("\t\t id %4d: %s\n", currInst.getInstID(), currBPInst.toString());
                }
            }
        }
    }
}
@Test
public void testInRangeSuccess() {
    Assert.assertTrue(4 == ParamUtils.inRange(4L, 3, 6, "Range calculation did not work properly"), "Did not return proper value");
    Assert.assertTrue(4 == ParamUtils.inRange(4, 3, 6, "Range calculation did not work properly"), "Did not return proper value");
    Assert.assertTrue(4 == ParamUtils.inRange(new IntRange(3, 6), 4, "error"));
    Assert.assertTrue(4.1 == ParamUtils.inRange(4.1, 3, 6, "Range calculation did not work properly"), "Did not return proper value");
    Assert.assertTrue(4.1 == ParamUtils.inRange(4.1, -3, 6, "Range calculation did not work properly"), "Did not return proper value");
    Assert.assertTrue(4.1 == ParamUtils.inRange(new DoubleRange(-3, 6), 4.1, "error"));
    Assert.assertTrue(0 == ParamUtils.inRange(0L, -3, 6, "Range calculation did not work properly"), "Did not return proper value");
    Assert.assertTrue(0 == ParamUtils.inRange(0, -3, 6, "Range calculation did not work properly"), "Did not return proper value");
    Assert.assertTrue(0 == ParamUtils.inRange(new IntRange(-3, 6), 0, "error"));
    Assert.assertTrue(0.0 == ParamUtils.inRange(0.0, -3, 6, "Range calculation did not work properly"), "Did not return proper value");
    Assert.assertTrue(0.0 == ParamUtils.inRange(new DoubleRange(-3, 6), 0.0, "error"));
    Assert.assertTrue(0 == ParamUtils.inRange(0L, -3, 6, "Range calculation did not work properly"), "Did not return proper value");
    Assert.assertTrue(0 == ParamUtils.inRange(0, -3, 6, "Range calculation did not work properly"), "Did not return proper value");
    Assert.assertTrue(0 == ParamUtils.inRange(new IntRange(-3, 6), 0, "error"));
    Assert.assertTrue(-1 == ParamUtils.inRange(-1L, -3, 6, "Range calculation did not work properly"), "Did not return proper value");
    Assert.assertTrue(-1 == ParamUtils.inRange(-1, -3, 6, "Range calculation did not work properly"), "Did not return proper value");
    Assert.assertTrue(-1 == ParamUtils.inRange(new IntRange(-3, 6), -1, "error"));
    Assert.assertTrue(-1.5 == ParamUtils.inRange(-1.5, -3, 6, "Range calculation did not work properly"), "Did not return proper value");
    Assert.assertTrue(-1.5 == ParamUtils.inRange(new DoubleRange(-3, 6), -1.5, "error"));
}
private void assertBasicPoNAssumptions(final File ponFile, final File initialTargetsFileUsedToCreatePoN) {
    try (final HDF5File ponHDF5File = new HDF5File(ponFile)) {
        final HDF5PCACoveragePoN pon = new HDF5PCACoveragePoN(ponHDF5File);

        Assert.assertTrue(pon.getTargets().size() >= pon.getPanelTargets().size());
        Assert.assertTrue(pon.getRawTargets().size() > pon.getTargets().size());

        // Each name list should line up with its corresponding target list
        Assert.assertTrue(pon.getTargetNames().size() == pon.getTargets().size());
        Assert.assertTrue(pon.getPanelTargetNames().size() == pon.getPanelTargets().size());
        Assert.assertTrue(pon.getRawTargetNames().size() == pon.getRawTargets().size());

        if (initialTargetsFileUsedToCreatePoN != null) {
            final TargetCollection<Target> tc = TargetArgumentCollection.readTargetCollection(initialTargetsFileUsedToCreatePoN);
            Assert.assertEquals(pon.getRawTargets().size(), tc.targetCount());

            // Check that the raw targets are the same
            Assert.assertTrue(IntStream.of(new IntRange(0, pon.getRawTargets().size() - 1).toArray()).boxed()
                    .map(i -> pon.getRawTargets().get(i).equals(tc.target(i)))
                    .allMatch(t -> t));
        }
    }
}
private void buildCardinalityRangeMaps(String file, HashMap<String, Integer> cardinality, HashMap<String, IntRange> range)
        throws JsonParseException, JsonMappingException, IOException {
    List<SchemaAnnotation> saList;

    if (file == null) {
        return; // Nothing to do here.
    }

    ObjectMapper objectMapper = new ObjectMapper();
    saList = objectMapper.readValue(new File(file), new TypeReference<List<SchemaAnnotation>>() {
    });

    for (SchemaAnnotation sa : saList) {
        String column = sa.getColumn();

        if (sa.isRange()) {
            range.put(column, new IntRange(sa.getRangeStart(), sa.getRangeEnd()));
        } else {
            cardinality.put(column, sa.getCardinality());
        }
    }
}
public static void main(String[] args) throws IOException, JSONException {
    final String[] columns = { "column1", "column2", "column3", "column4", "column5" };

    final Map<String, DataType> dataTypes = new HashMap<String, DataType>();
    final Map<String, FieldType> fieldTypes = new HashMap<String, FieldType>();
    final Map<String, TimeUnit> timeUnits = new HashMap<String, TimeUnit>();

    final Map<String, Integer> cardinality = new HashMap<String, Integer>();
    final Map<String, IntRange> range = new HashMap<String, IntRange>();

    for (final String col : columns) {
        dataTypes.put(col, DataType.INT);
        fieldTypes.put(col, FieldType.DIMENSION);
        cardinality.put(col, 1000);
    }

    final DataGeneratorSpec spec = new DataGeneratorSpec(Arrays.asList(columns), cardinality, range, dataTypes,
            fieldTypes, timeUnits, FileFormat.AVRO, "/tmp/out", true);

    final DataGenerator gen = new DataGenerator();
    gen.init(spec);
    gen.generate(1000000L, 2);
}
private void addTextAndImageLinks(List<Link> links, DocumentIndex reindexingIndex, UserDomainObject user,
                                  HttpServletRequest request, IntRange range) {
    BooleanQuery query = new BooleanQuery();
    query.add(new PrefixQuery(new Term(DocumentIndex.FIELD__NONSTRIPPED_TEXT, "http")), Occur.SHOULD);
    query.add(new PrefixQuery(new Term(DocumentIndex.FIELD__NONSTRIPPED_TEXT, "href")), Occur.SHOULD);
    query.add(new PrefixQuery(new Term(DocumentIndex.FIELD__IMAGE_LINK_URL, "http")), Occur.SHOULD);
    List textDocuments = reindexingIndex.search(new SimpleDocumentQuery(query), user);
    for (Object textDocument1 : textDocuments) {
        TextDocumentDomainObject textDocument = (TextDocumentDomainObject) textDocument1;
        if (!range.containsInteger(textDocument.getId())) {
            continue;
        }
        addTextLinks(links, textDocument, request);
        addImageLinks(links, textDocument, request);
    }
}
private SegmentPartitionMetadata getPartitionMetadataFromTableConfig(String tableName, int numPartitions, int partitionId) {
    Map<String, ColumnPartitionMetadata> partitionMetadataMap = new HashMap<>();
    if (_propertyStore == null) {
        return null;
    }
    TableConfig tableConfig = getRealtimeTableConfig(tableName);
    SegmentPartitionMetadata partitionMetadata = null;
    SegmentPartitionConfig partitionConfig = tableConfig.getIndexingConfig().getSegmentPartitionConfig();
    if (partitionConfig != null && partitionConfig.getColumnPartitionMap() != null
            && partitionConfig.getColumnPartitionMap().size() > 0) {
        Map<String, ColumnPartitionConfig> columnPartitionMap = partitionConfig.getColumnPartitionMap();
        for (Map.Entry<String, ColumnPartitionConfig> entry : columnPartitionMap.entrySet()) {
            String column = entry.getKey();
            ColumnPartitionConfig columnPartitionConfig = entry.getValue();
            partitionMetadataMap.put(column, new ColumnPartitionMetadata(columnPartitionConfig.getFunctionName(),
                    numPartitions, Collections.singletonList(new IntRange(partitionId))));
        }
        partitionMetadata = new SegmentPartitionMetadata(partitionMetadataMap);
    }
    return partitionMetadata;
}
private void addOneSegmentWithPartitionInfo(String tableName, String segmentName, String columnName, int partitionNumber) {
    ColumnMetadata columnMetadata = mock(ColumnMetadata.class);
    List<IntRange> partitionRanges = new ArrayList<>();
    partitionRanges.add(new IntRange(partitionNumber));
    when(columnMetadata.getPartitionRanges()).thenReturn(partitionRanges);

    SegmentMetadataImpl meta = mock(SegmentMetadataImpl.class);
    if (columnName != null) {
        when(meta.getColumnMetadataFor(columnName)).thenReturn(columnMetadata);
    }
    when(meta.getTableName()).thenReturn(tableName);
    when(meta.getName()).thenReturn(segmentName);
    when(meta.getCrc()).thenReturn("0");
    _pinotHelixResourceManager.addNewSegment(meta, "downloadUrl");
}
@Test
public void testPruner() {
    SegmentZKMetadata metadata = new OfflineSegmentZKMetadata();
    Map<String, ColumnPartitionMetadata> columnPartitionMap = new HashMap<>();

    int expectedPartition = 3;
    columnPartitionMap.put(PARTITION_COLUMN, new ColumnPartitionMetadata(PARTITION_FUNCTION_NAME, NUM_PARTITION,
            Collections.singletonList(new IntRange(expectedPartition))));

    SegmentZKMetadataPrunerService prunerService = new SegmentZKMetadataPrunerService(new String[]{PRUNER_NAME});
    SegmentPartitionMetadata segmentPartitionMetadata = new SegmentPartitionMetadata(columnPartitionMap);
    metadata.setPartitionMetadata(segmentPartitionMetadata);

    Pql2Compiler compiler = new Pql2Compiler();
    for (int actualPartition = 0; actualPartition < NUM_PARTITION; actualPartition++) {
        String query = "select count(*) from myTable where " + PARTITION_COLUMN + " = " + actualPartition;
        BrokerRequest brokerRequest = compiler.compileToBrokerRequest(query);
        SegmentPrunerContext prunerContext = new SegmentPrunerContext(brokerRequest);
        Assert.assertEquals(prunerService.prune(metadata, prunerContext), (actualPartition != expectedPartition));
    }
}
/**
 * Helper method to convert a list of {@link IntRange} to a delimited string.
 * The delimiter used is {@link #PARTITION_VALUE_DELIMITER}.
 *
 * @param ranges List of ranges to be converted to String.
 * @return String representation of the list of ranges.
 */
public static String rangesToString(List<IntRange> ranges) {
    StringBuilder builder = new StringBuilder();
    for (int i = 0; i < ranges.size(); i++) {
        builder.append("[");
        IntRange range = ranges.get(i);
        builder.append(range.getMinimumInteger());
        builder.append(" ");
        builder.append(range.getMaximumInteger());
        builder.append("]");
        if (i < ranges.size() - 1) {
            builder.append(PARTITION_VALUE_DELIMITER);
        }
    }
    return builder.toString();
}
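For illustration, a hedged usage sketch of the helper above; the range values are made up and the concrete separator depends on how PARTITION_VALUE_DELIMITER is defined in the enclosing class.

// Hypothetical usage of rangesToString(): each range renders as "[min max]" and
// consecutive ranges are joined with PARTITION_VALUE_DELIMITER, so the two ranges
// below encode to "[0 2]" + PARTITION_VALUE_DELIMITER + "[5 5]".
List<IntRange> ranges = Arrays.asList(new IntRange(0, 2), new IntRange(5));
String encoded = rangesToString(ranges);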
public ColumnIndexCreationInfo(boolean createDictionary, Object min, Object max, Object sortedUniqueElementsArray,
        ForwardIndexType forwardIndexType, InvertedIndexType invertedIndexType, boolean isSorted, boolean hasNulls,
        int totalNumberOfEntries, int maxNumberOfMultiValueElements, int legnthOfLongestEntry, boolean isAutoGenerated,
        PartitionFunction partitionFunction, int numPartitions, List<IntRange> partitionRanges, Object defaultNullValue) {
    this.createDictionary = createDictionary;
    this.min = min;
    this.max = max;
    this.sortedUniqueElementsArray = sortedUniqueElementsArray;
    this.forwardIndexType = forwardIndexType;
    this.invertedIndexType = invertedIndexType;
    this.isSorted = isSorted;
    this.hasNulls = hasNulls;
    this.totalNumberOfEntries = totalNumberOfEntries;
    this.maxNumberOfMultiValueElements = maxNumberOfMultiValueElements;
    this.legnthOfLongestEntry = legnthOfLongestEntry;
    this.isAutoGenerated = isAutoGenerated;
    this.partitionFunction = partitionFunction;
    this.numPartitions = numPartitions;
    this.partitionRanges = partitionRanges;
    this.defaultNullValue = defaultNullValue;
}
/**
 * Unit test:
 * <ul>
 *   <li> Partitioning metadata is written out correctly for a column where all values comply with the partitioning scheme. </li>
 *   <li> Partitioning metadata is dropped for a column that does not comply with the partitioning scheme. </li>
 *   <li> Partitioning metadata is not written out for a column for which the metadata was not specified. </li>
 * </ul>
 * @throws Exception
 */
@Test
public void testMetadata() throws Exception {
    SegmentMetadataImpl metadata = (SegmentMetadataImpl) _segment.getSegmentMetadata();
    ColumnMetadata columnMetadata = metadata.getColumnMetadataFor(PARTITIONED_COLUMN_NAME);

    Assert.assertEquals(columnMetadata.getPartitionFunction().toString().toLowerCase(),
            EXPECTED_PARTITION_FUNCTION.toLowerCase());

    List<IntRange> partitionValues = columnMetadata.getPartitionRanges();
    Assert.assertEquals(partitionValues.size(), 1);

    List<IntRange> expectedPartitionValues = ColumnPartitionConfig.rangesFromString(EXPECTED_PARTITION_VALUE_STRING);
    IntRange actualValue = partitionValues.get(0);
    IntRange expectedPartitionValue = expectedPartitionValues.get(0);
    Assert.assertEquals(actualValue.getMinimumInteger(), expectedPartitionValue.getMinimumInteger());
    Assert.assertEquals(actualValue.getMaximumInteger(), expectedPartitionValue.getMaximumInteger());

    columnMetadata = metadata.getColumnMetadataFor(NON_PARTITIONED_COLUMN_NAME);
    Assert.assertNull(columnMetadata.getPartitionFunction());
    Assert.assertNull(columnMetadata.getPartitionRanges());
}
public void testSplitIntoSequentialRanges() {
    assertEquals(ImmutableList.of(new IntRange(1)),
            CollectionUtils.splitIntoSequentialRanges(ImmutableSortedSet.of(1)));
    assertEquals(ImmutableList.of(new IntRange(1, 3)),
            CollectionUtils.splitIntoSequentialRanges(ImmutableSortedSet.of(1, 2, 3)));
    assertEquals(ImmutableList.of(new IntRange(1, 1), new IntRange(3, 3)),
            CollectionUtils.splitIntoSequentialRanges(ImmutableSortedSet.of(1, 3)));
    assertEquals(ImmutableList.of(new IntRange(1, 2), new IntRange(4, 5)),
            CollectionUtils.splitIntoSequentialRanges(ImmutableSortedSet.of(1, 2, 4, 5)));
    assertEquals(ImmutableList.of(new IntRange(1, 2), new IntRange(4, 4)),
            CollectionUtils.splitIntoSequentialRanges(ImmutableSortedSet.of(1, 2, 4)));
    assertEquals(ImmutableList.of(new IntRange(1, 2), new IntRange(4, 4), new IntRange(6, 8)),
            CollectionUtils.splitIntoSequentialRanges(ImmutableSortedSet.of(1, 2, 4, 6, 7, 8)));
}
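The assertions above pin down the expected grouping behavior. Below is a minimal sketch of how consecutive integers could be grouped into IntRange objects, written against those assertions as an illustration; it is not the actual CollectionUtils.splitIntoSequentialRanges implementation. It uses Guava's ImmutableList, matching the collections used in the test.

// Hypothetical sketch: walk the sorted set and close a range whenever a gap
// (difference greater than one) appears between neighbouring values.
static List<IntRange> splitIntoSequentialRangesSketch(SortedSet<Integer> ints) {
    ImmutableList.Builder<IntRange> ranges = ImmutableList.builder();
    Integer start = null;
    Integer previous = null;
    for (int value : ints) {
        if (start == null) {
            start = value;
        } else if (value > previous + 1) {
            // Gap found: close the current run and begin a new one.
            ranges.add(new IntRange(start.intValue(), previous.intValue()));
            start = value;
        }
        previous = value;
    }
    if (start != null) {
        ranges.add(new IntRange(start.intValue(), previous.intValue()));
    }
    return ranges.build();
}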
public void testParseScreenResultIncremental() throws Exception {
    Screen screen = MakeDummyEntities.makeDummyScreen(115);
    mockScreenResultParser.parse(screen, SCREEN_RESULT_115_TEST_WORKBOOK_FILE.getFile(), new IntRange(1, 2), false);
    assertEquals(Collections.EMPTY_LIST, mockScreenResultParser.getErrors());
    assertEquals(640, screen.getScreenResult().getAssayWells().size());
    mockScreenResultParser.parse(screen, SCREEN_RESULT_115_TEST_WORKBOOK_FILE.getFile(), new IntRange(3, 3), false);
    assertEquals(Collections.EMPTY_LIST, mockScreenResultParser.getErrors());
    assertEquals(960, screen.getScreenResult().getAssayWells().size());
    doTestScreenResult115ParseResult(screen.getScreenResult());
    assertEquals(960, screen.getScreenResult().getAssayWells().size());
}
private NameUsageMatch assertMatch(String name, LinneanClassification query, Integer expectedKey,
                                   @Nullable NameUsageMatch.MatchType type, IntRange confidence) {
    NameUsageMatch best = matcher.match(name, null, query, false, true);
    print(name, best);
    assertEquals("Wrong expected key", expectedKey, best.getUsageKey());
    if (type == null) {
        assertTrue("Wrong none match type", best.getMatchType() != NameUsageMatch.MatchType.NONE);
    } else {
        assertEquals("Wrong match type", type, best.getMatchType());
    }
    if (confidence != null) {
        assertTrue("confidence " + best.getConfidence() + " not within " + confidence,
                confidence.containsInteger(best.getConfidence()));
    }
    assertMatchConsistency(best);
    return best;
}
@Test
public void testMatching() throws IOException, InterruptedException {
    LinneanClassification cl = new NameUsageMatch();
    assertMatch("Anephlus", cl, 1100135, new IntRange(92, 95));
    assertMatch("Aneplus", cl, 1100050, new IntRange(90, 95));

    cl.setKingdom("Animalia");
    cl.setClazz("Insecta");
    assertMatch("Aneplus", cl, 1100050, new IntRange(97, 100));

    // genus Aneplus is order=Coleoptera, but Anelus is a Spirobolida in class Diplopoda
    cl.setClazz("Diplopoda");
    cl.setOrder("Spirobolida");
    assertMatch("Aneplus", cl, 1027792, new IntRange(90, 99));

    cl.setFamily("Atopetholidae");
    assertMatch("Aneplus", cl, 1027792, new IntRange(98, 100));

    // too far off
    assertMatch("Anmeplues", cl, 1, new IntRange(90, 100));
    assertNoMatch("Anmeplues", new NameUsageMatch(), new IntRange(-10, 80));
}
@Test
public void testAuthorshipMatching2() throws IOException {
    NameUsageMatch cl = new NameUsageMatch();
    assertMatch("Prunella alba", cl, 5608009, new IntRange(98, 100));

    assertMatch("Prunella alba Pall. ex M.Bieb.", cl, 5608009, new IntRange(100, 100));
    assertMatch("Prunella alba M.Bieb.", cl, 5608009, new IntRange(100, 100));

    assertMatch("Prunella alba Pall.", cl, 5608009, new IntRange(80, 90));
    assertMatch("Prunella alba Döring", cl, 5608009, new IntRange(80, 90));

    // 2 homonyms exist
    assertMatch("Elytrigia repens", cl, 2706649, new IntRange(92, 98));
    assertMatch("Elytrigia repens Desv.", cl, 7522774, new IntRange(98, 100));
    assertMatch("Elytrigia repens Nevski", cl, 2706649, new IntRange(98, 100));
    assertMatch("Elytrigia repens (L.) Desv.", cl, 7522774, new IntRange(100, 100));
    assertMatch("Elytrigia repens (L.) Nevski", cl, 2706649, new IntRange(100, 100));

    // very different author, match to genus only
    assertMatch("Elytrigia repens Karimba", cl, 7826764, NameUsageMatch.MatchType.HIGHERRANK);

    // basionym author is right, now match the accepted species. Or shouldn't we?
    assertMatch("Elytrigia repens (L.) Karimba", cl, 2706649, new IntRange(80, 90));
}
@Test
public void testOtuMatching() throws IOException {
    NameUsageMatch cl = new NameUsageMatch();

    NameUsageMatch m = assertMatch("BOLD:AAX3687", cl, 993172099, new IntRange(90, 100));
    assertEquals("BOLD:AAX3687", m.getScientificName());

    assertMatch("SH021315.07FU", cl, 993730906, new IntRange(90, 100));

    cl.setFamily("Maldanidae");
    assertMatch("bold:aax3687", cl, 993172099, new IntRange(95, 100));

    assertNoMatch("BOLD:AAX3688", cl);
    assertNoMatch("BOLD:AAY3687", cl);
    assertNoMatch("COLD:AAX3687", cl);
    assertNoMatch("AAX3687", cl);
}
public TimeSpan(Date pAtDate, IntRange pSpan) {
    //range at day
    if (pSpan.getMinimumInteger() == pSpan.getMaximumInteger()) {
        throw new RuntimeException("Span without size not allowed");
    }
    pAtDate = DateUtils.truncate(pAtDate, Calendar.DATE);
    LongRange asLong = new LongRange(pAtDate.getTime() + pSpan.getMinimumLong() * DateUtils.MILLIS_PER_HOUR,
            pAtDate.getTime() + pSpan.getMaximumLong() * DateUtils.MILLIS_PER_HOUR - 1);
    init(asLong, false);
}
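A hedged usage sketch of this constructor (the date is arbitrary): the date is truncated to midnight and the hour range is mapped onto that day in milliseconds, so a range of 8..20 covers 08:00:00.000 up to 19:59:59.999 of the given day.

// Hypothetical usage: hours 8..20 of "today" become the millisecond range
// [dayStart + 8h, dayStart + 20h - 1ms]; an empty hour range (min == max) would throw.
TimeSpan window = new TimeSpan(new Date(), new IntRange(8, 20));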
public boolean intersectsWithNightBonus() {
    if (!ServerSettings.getSingleton().isNightBonusActive()) {
        return false;
    }
    TimeSpan nightBonusSpan = new TimeSpan(new IntRange(ServerSettings.getSingleton().getNightBonusStartHour(),
            ServerSettings.getSingleton().getNightBonusEndHour()));
    return nightBonusSpan.intersects(this);
}
public IntRange getFarmRange(FARM_CONFIGURATION pConfig) {
    if (pConfig == null) {
        pConfig = FARM_CONFIGURATION.C;
    }
    switch (pConfig) {
        case A:
            return new IntRange(UIHelper.parseIntFromField(jMinFarmRuntimeA, 0),
                    UIHelper.parseIntFromField(jMaxFarmRuntimeA, 60));
        case B:
            return new IntRange(UIHelper.parseIntFromField(jMinFarmRuntimeB, 0),
                    UIHelper.parseIntFromField(jMaxFarmRuntimeB, 60));
        default:
            return new IntRange(UIHelper.parseIntFromField(jMinFarmRuntimeC, 0),
                    UIHelper.parseIntFromField(jMaxFarmRuntimeC, 60));
    }
}
/** * Print range of DML program lines * @param lines DML script lines of code * @param range Range of lines of DML code to be displayed */ public void printLines(String [] lines, IntRange range) { //Display all lines of DML script for (int lineNumber=range.getMinimumInteger() ; lineNumber<=range.getMaximumInteger() ; lineNumber++) System.out.format("line %4d: %s\n", lineNumber, lines[lineNumber-1]); }
/**
 * Print range of DML program lines interspersed with corresponding runtime instructions
 * @param lines DML script lines of code
 * @param DMLInstMap Mapping between source code line number and corresponding runtime instruction(s)
 * @param range Range of lines of DML code to be displayed
 * @param debug Flag for displaying instructions in debugger test integration
 */
public void printInstructions(String[] lines, TreeMap<Integer, ArrayList<Instruction>> DMLInstMap, IntRange range, boolean debug) {
    //Display instructions with corresponding DML line numbers
    for (int lineNumber = range.getMinimumInteger(); lineNumber <= range.getMaximumInteger(); lineNumber++) {
        System.out.format("line %4d: %s\n", lineNumber, lines[lineNumber - 1]);
        if (DMLInstMap.get(lineNumber) != null) {
            for (Instruction currInst : DMLInstMap.get(lineNumber)) {
                if (currInst instanceof CPInstruction) {
                    if (!debug) {
                        System.out.format("\t\t id %4d: %s\n", currInst.getInstID(), prepareInstruction(currInst.toString()));
                    } else {
                        String[] instStr = prepareInstruction(currInst.toString()).split(" ");
                        System.out.format("\t\t id %4d: %s %s\n", currInst.getInstID(), instStr[0], instStr[1]);
                    }
                } else if (currInst instanceof MRJobInstruction) {
                    MRJobInstruction currMRInst = (MRJobInstruction) currInst;
                    System.out.format("\t\t id %4d: %s\n", currInst.getInstID(), prepareInstruction(currMRInst.getMRString(debug)));
                } else if (currInst instanceof BreakPointInstruction) {
                    BreakPointInstruction currBPInst = (BreakPointInstruction) currInst;
                    System.out.format("\t\t id %4d: %s\n", currInst.getInstID(), currBPInst.toString());
                }
            }
        }
    }
}
private ArrayResult(final List<D> data, final List<T> positions, final HMM<D, T, S> model,
                    final double[][] logForwardProbabilities, final double[][] logBackwardProbabilities) {
    this.data = Collections.unmodifiableList(new ArrayList<>(data));
    this.positions = Collections.unmodifiableList(new ArrayList<>(positions));
    positionIndexRange = new IntRange(0, positions.size() - 1);
    this.model = model;
    positionIndex = composeIndexMap(this.positions);
    stateIndex = composeIndexMap(model.hiddenStates());
    this.logBackwardProbabilities = logBackwardProbabilities;
    this.logForwardProbabilities = logForwardProbabilities;
    logDataLikelihood = calculateLogDataLikelihood(logForwardProbabilities, logBackwardProbabilities);
}
@Override
public boolean execute() throws Exception {
    LOGGER.info("Executing command: " + toString());

    if ((_numRecords < 0) || (_numFiles < 0)) {
        throw new RuntimeException("Cannot generate negative number of records/files.");
    }

    File schemaFile = new File(_schemaFile);
    Schema schema = new ObjectMapper().readValue(schemaFile, Schema.class);

    List<String> columns = new LinkedList<String>();
    final HashMap<String, DataType> dataTypes = new HashMap<String, DataType>();
    final HashMap<String, FieldType> fieldTypes = new HashMap<String, FieldType>();
    final HashMap<String, TimeUnit> timeUnits = new HashMap<String, TimeUnit>();

    final HashMap<String, Integer> cardinality = new HashMap<String, Integer>();
    final HashMap<String, IntRange> range = new HashMap<String, IntRange>();

    buildCardinalityRangeMaps(_schemaAnnFile, cardinality, range);
    final DataGeneratorSpec spec = buildDataGeneratorSpec(schema, columns, dataTypes, fieldTypes, timeUnits,
            cardinality, range);

    final DataGenerator gen = new DataGenerator();
    gen.init(spec);
    gen.generate(_numRecords, _numFiles);

    return true;
}
private DataGeneratorSpec buildDataGeneratorSpec(Schema schema, List<String> columns,
        HashMap<String, DataType> dataTypes, HashMap<String, FieldType> fieldTypes,
        HashMap<String, TimeUnit> timeUnits, HashMap<String, Integer> cardinality,
        HashMap<String, IntRange> range) {
    for (final FieldSpec fs : schema.getAllFieldSpecs()) {
        String col = fs.getName();
        columns.add(col);
        dataTypes.put(col, fs.getDataType());
        fieldTypes.put(col, fs.getFieldType());

        switch (fs.getFieldType()) {
            case DIMENSION:
                if (cardinality.get(col) == null) {
                    cardinality.put(col, 1000);
                }
                break;
            case METRIC:
                if (range.get(col) == null) {
                    range.put(col, new IntRange(1, 1000));
                }
                break;
            case TIME:
                range.put(col, new IntRange(1, 1000));
                TimeFieldSpec tfs = (TimeFieldSpec) fs;
                timeUnits.put(col, tfs.getIncomingGranularitySpec().getTimeType());
                break;
            default:
                throw new RuntimeException("Invalid field type.");
        }
    }
    return new DataGeneratorSpec(columns, cardinality, range, dataTypes, fieldTypes, timeUnits, FileFormat.AVRO,
            _outDir, _overwrite);
}
public DataGeneratorSpec(List<String> columns, Map<String, Integer> cardinalityMap, Map<String, IntRange> rangeMap,
        Map<String, DataType> dataTypesMap, Map<String, FieldType> fieldTypesMap, Map<String, TimeUnit> timeUnitMap,
        FileFormat format, String outputDir, boolean override) {
    this.columns = columns;
    this.cardinalityMap = cardinalityMap;
    this.rangeMap = rangeMap;
    outputFileFormat = format;
    this.outputDir = outputDir;
    overrideOutDir = override;
    this.dataTypesMap = dataTypesMap;
    this.fieldTypesMap = fieldTypesMap;
    this.timeUnitMap = timeUnitMap;
}
public void init(DataGeneratorSpec spec) throws IOException {
    genSpec = spec;
    outDir = new File(genSpec.getOutputDir());

    if (outDir.exists() && !genSpec.isOverrideOutDir()) {
        LOGGER.error("output directory already exists, and override is set to false");
        throw new RuntimeException("output directory exists");
    }

    if (outDir.exists()) {
        FileUtils.deleteDirectory(outDir);
    }
    outDir.mkdir();

    for (final String column : genSpec.getColumns()) {
        DataType dataType = genSpec.getDataTypesMap().get(column);

        if (genSpec.getCardinalityMap().containsKey(column)) {
            generators.put(column, GeneratorFactory.getGeneratorFor(dataType, genSpec.getCardinalityMap().get(column)));
        } else if (genSpec.getRangeMap().containsKey(column)) {
            IntRange range = genSpec.getRangeMap().get(column);
            generators.put(column,
                    GeneratorFactory.getGeneratorFor(dataType, range.getMinimumInteger(), range.getMaximumInteger()));
        } else {
            LOGGER.error("cardinality for this column does not exist : " + column);
            throw new RuntimeException("cardinality for this column does not exist");
        }

        generators.get(column).init();
    }
}
/**
 * Method for checking whether the length of a password is within the given bounds
 *
 * @param passwd - string to check length of
 * @param min - minimum length
 * @param max - maximum length (must be >= min)
 * @return true if the password length is within the bounds, false otherwise
 */
public static boolean isAcceptableLength(String passwd, int min, int max) {
    //null or blank password is never acceptable
    if (StringUtils.isBlank(passwd)) {
        return false;
    }

    //check bounds
    if (min > max) {
        log.error("Invalid bounds supplied, min (" + min + ") is greater than max (" + max + ")");
    }

    //length
    int length = passwd.length();

    //check range
    IntRange range = new IntRange(min, max);
    if (range.containsInteger(length)) {
        log.debug("Range ok");
        return true;
    }

    log.debug("Range bad; min=" + min + ", max=" + max + ", length=" + length);
    return false;
}
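A hedged usage sketch of the check above: IntRange.containsInteger() is inclusive on both ends, so lengths equal to min or max are accepted. The example passwords are made up.

// Hypothetical calls against the method above.
isAcceptableLength("s3cret!", 8, 20);   // false - only 7 characters
isAcceptableLength("s3cret!!", 8, 20);  // true  - exactly the minimum length
isAcceptableLength(null, 8, 20);        // false - blank/null passwords are rejected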
private int[] getDocumentIds(IntRange idRange) {
    List<Integer> ids = documentSaver.getDocRepository().getDocumentIdsInRange(
            idRange.getMinimumInteger(), idRange.getMaximumInteger());
    return ArrayUtils.toPrimitive(ids.toArray(new Integer[ids.size()]));
}
private void addUrlDocumentLinks(List<Link> links, DocumentIndex reindexingIndex, UserDomainObject user,
                                 HttpServletRequest request, IntRange range) {
    TermQuery urlDocumentsQuery = new TermQuery(new Term(DocumentIndex.FIELD__DOC_TYPE_ID,
            "" + DocumentTypeDomainObject.URL_ID));
    List urlDocuments = reindexingIndex.search(new SimpleDocumentQuery(urlDocumentsQuery), user);
    for (Object urlDocument1 : urlDocuments) {
        UrlDocumentDomainObject urlDocument = (UrlDocumentDomainObject) urlDocument1;
        if (!range.containsInteger(urlDocument.getId())) {
            continue;
        }
        Link link = new UrlDocumentLink(urlDocument, request);
        links.add(link);
    }
}
public void doGet(HttpServletRequest req, HttpServletResponse res) throws ServletException, IOException {
    UserDomainObject user = Utility.getLoggedOnUser(req);
    if (!user.isSuperAdmin()) {
        return;
    }
    ImcmsServices imcref = Imcms.getServices();
    IntRange allDocumentsRange = new IntRange(getMinDocumentId(imcref), getMaxDocumentId(imcref));
    String startString = req.getParameter(PARAMETER__LIST_START);
    String endString = req.getParameter(PARAMETER__LIST_END);
    int start = null != startString ? Integer.parseInt(startString) : allDocumentsRange.getMinimumInteger();
    int end = null != endString ? Integer.parseInt(endString) : allDocumentsRange.getMaximumInteger();
    FormData formData = new FormData();
    formData.selectedRange = new IntRange(start, end);
    DocumentMapper documentMapper = imcref.getDocumentMapper();
    if (req.getParameter(PARAMETER_BUTTON__LIST) != null) {
        formData.documentsIterator = documentMapper.getDocumentsIterator(formData.selectedRange);
    }
    req.setAttribute(REQUEST_ATTRIBUTE__FORM_DATA, formData);
    req.getRequestDispatcher(Utility.getLinkService().get("document.list", user.getLanguageIso639_2())).forward(req, res);
}
/**
 * Attempt to pull items from an inventory into a receiving buffer.
 *
 * @param targetInv the target inventory
 * @param amount the desired number of items
 * @param buffer an item stack into which to insert the transferred items
 * @param filter a filter to whitelist/blacklist items
 * @return the items pulled, or null if nothing was pulled
 */
public static ItemStack pullFromInventory(Inventory targetInv, int amount, ItemStack buffer, Filter filter) {
    if (targetInv == null) {
        return null;
    }
    IntRange range = getExtractionSlots(targetInv);
    for (int slot = range.getMinimumInteger(); slot <= range.getMaximumInteger(); slot++) {
        ItemStack stack = targetInv.getItem(slot);
        if (stack != null) {
            if ((filter == null || filter.shouldPass(stack)) && (buffer == null || stack.isSimilar(buffer))) {
                Debugger.getInstance().debug(2, "pulling " + stack + " from " + targetInv.getHolder());
                int toTake = Math.min(amount, stack.getAmount());
                if (buffer != null) {
                    toTake = Math.min(toTake, buffer.getType().getMaxStackSize() - buffer.getAmount());
                }
                if (toTake > 0) {
                    if (buffer == null) {
                        buffer = stack.clone();
                        buffer.setAmount(toTake);
                    } else {
                        buffer.setAmount(buffer.getAmount() + toTake);
                    }
                    stack.setAmount(stack.getAmount() - toTake);
                    targetInv.setItem(slot, stack.getAmount() > 0 ? stack : null);
                    return buffer;
                }
            }
        }
    }
    return null;
}