@Override
protected void cleanup(final Context context) throws IOException, InterruptedException {
	// Flush every (item, count) pair accumulated in the local combiner to the
	// framework, then drop the combiner so it can be garbage-collected.
	final IntWritable outKey = new IntWritable();
	final IntWritable outValue = new IntWritable();
	for (TIntIntIterator entries = this.combiner.iterator(); entries.hasNext();) {
		entries.advance();
		outKey.set(entries.key());
		outValue.set(entries.value());
		context.write(outKey, outValue);
	}
	this.combiner = null;
}
public void dispose() {
	logger.debug("Disposing shader {}.", title);
	// Release GL resources in dependency order: linked programs first, then
	// the fragment and vertex shader objects they were built from.
	for (TIntIntIterator programs = shaderPrograms.iterator(); programs.hasNext();) {
		programs.advance();
		GL20.glDeleteProgram(programs.value());
	}
	shaderPrograms.clear();
	for (TIntIntIterator fragments = fragmentPrograms.iterator(); fragments.hasNext();) {
		fragments.advance();
		GL20.glDeleteShader(fragments.value());
	}
	fragmentPrograms.clear();
	for (TIntIntIterator vertices = vertexPrograms.iterator(); vertices.hasNext();) {
		vertices.advance();
		GL20.glDeleteShader(vertices.value());
	}
	vertexPrograms.clear();
}
/**
 * Feeds this node's per-item support counts into the top-k collector.
 * Items below maxCandidate may be routed to collectUnclosedForItem (future
 * extensions); all others compete for the k highest distinct supports, which
 * are then collected as one-item pattern extensions.
 *
 * @param topKcoll collector receiving the patterns
 * @param outputPatternsForFutureExtensions when true, extension candidates
 *            bypass the top-k buffer and go straight to the collector
 * @return the smallest support actually inserted, or -1 if fewer than k
 *         distinct supports were found
 */
final public int insertUnclosedPatterns(PerItemTopKCollector topKcoll, boolean outputPatternsForFutureExtensions) {
	// Slot i holds the (i+1)-th smallest of the k best distinct supports,
	// and the item that achieved it.
	int[] topKDistinctSupports = new int[topKcoll.getK()];
	int[] topKCorrespondingItems = new int[topKcoll.getK()];
	boolean highestUnique = false;
	// split between extension candidates and others ?
	// set a max because some items will never be able to raise their
	// threshold anyway?
	TIntIntIterator supportIterator = this.supportCounts.iterator();
	while (supportIterator.hasNext()) {
		supportIterator.advance();
		if (outputPatternsForFutureExtensions && supportIterator.key() < this.maxCandidate) {
			// NOTE(review): reverseRenaming is indexed by the support VALUE here,
			// while everywhere else in this method it is indexed by an item id
			// (see reverseRenaming[topKCorrespondingItems[i]] below). Looks
			// suspicious — confirm against collectUnclosedForItem's contract.
			topKcoll.collectUnclosedForItem(supportIterator.value(), this.pattern, this.reverseRenaming[supportIterator.value()]);
		} else {
			highestUnique = updateTopK(topKDistinctSupports, topKCorrespondingItems, supportIterator.key(), supportIterator.value(), highestUnique);
		}
	}
	boolean highest = true;
	int lastInsertSupport = this.minSupport;
	// Walk the buffer from highest support down, emitting one extended
	// pattern per filled slot.
	for (int i = topKDistinctSupports.length - 1; i >= 0; i--) {
		if (topKDistinctSupports[i] == 0) {
			// AKA I didn't find k distinct supports
			lastInsertSupport = -1;
			break;
		} else {
			int[] newPattern = Arrays.copyOf(this.pattern, this.pattern.length + 1);
			newPattern[pattern.length] = this.reverseRenaming[topKCorrespondingItems[i]];
			topKcoll.collect(topKDistinctSupports[i], newPattern, highest && highestUnique);
			lastInsertSupport = topKDistinctSupports[i];
		}
		highest = false;
	}
	return lastInsertSupport;
}
/**
 * Builds one occurrence list per item, pre-sized from the given lengths map.
 *
 * @param lengths map from item id to the expected number of occurrences,
 *            used as the initial capacity of each item's tid-list
 */
public MapTidList(final TIntIntMap lengths) {
	for (TIntIntIterator entry = lengths.iterator(); entry.hasNext();) {
		entry.advance();
		this.occurrences.put(entry.key(), new TIntArrayList(entry.value()));
	}
}
@Override
protected void cleanup(final Context context) throws IOException, InterruptedException {
	// Emit every combiner entry as (NullWritable, item+support), then release
	// the combiner for garbage collection.
	final NullWritable nullKey = NullWritable.get();
	final ItemAndSupportWritable outValue = new ItemAndSupportWritable();
	for (TIntIntIterator entries = this.combiner.iterator(); entries.hasNext();) {
		entries.advance();
		outValue.set(entries.key(), entries.value());
		context.write(nullKey, outValue);
	}
	this.combiner = null;
}
/**
 * Seeds the top-k slots of each known item with a placeholder entry carrying
 * the preloaded support bound, so later candidates must beat that bound.
 *
 * @param perItemBounds map from item id to its preloaded support bound;
 *            items absent from this.topK are ignored
 */
public void preloadBounds(TIntIntMap perItemBounds) {
	TIntIntIterator iterator = perItemBounds.iterator();
	// Placeholder pattern content; only the support bound matters here.
	int[] fakePattern = new int[] { };
	while (iterator.hasNext()) {
		iterator.advance();
		final int item = iterator.key();
		PatternWithFreq[] top = this.topK.get(item);
		if (top != null) {
			// NOTE(review): Arrays.fill stores the SAME PatternWithFreq instance
			// in every slot. Harmless if the placeholder is treated as immutable,
			// but a hazard if any slot is later mutated in place — confirm.
			Arrays.fill(top, new PatternWithFreq(iterator.value(), fakePattern, true));
		}
	}
}
@Override
protected void cleanup(Context context) throws java.io.IOException, InterruptedException {
	final Configuration conf = context.getConfiguration();
	final int minSupport = conf.getInt(TopPIoverHadoop.KEY_MINSUP, 10);
	final IntWritable outKey = new IntWritable();
	final IntWritable outValue = new IntWritable();
	// Keep only items meeting the support threshold; the TreeSet orders them
	// by ItemAndBigSupport's natural ordering.
	final TreeSet<ItemAndBigSupport> frequentItems = new TreeSet<ItemAndBigSupport>();
	for (TIntIntIterator entries = this.itemSupports.iterator(); entries.hasNext();) {
		entries.advance();
		final int support = entries.value();
		if (support >= minSupport) {
			frequentItems.add(new ItemAndBigSupport(entries.key(), support));
		}
	}
	this.itemSupports = null;
	// Emit each surviving item with a dense new id (0..n-1) in sorted order.
	int rebased = 0;
	for (ItemAndBigSupport entry : frequentItems) {
		outKey.set(entry.item);
		outValue.set(rebased++);
		context.write(outKey, outValue);
	}
	// Record the highest id actually assigned.
	rebased -= 1;
	context.getCounter(COUNTERS_GROUP, COUNTER_REBASING_MAX_ID).setValue(rebased);
}
@Override
public void testInt_TPrimitiveHashMap() {
	// Sum all keys of the primitive map, repeated ITERATIONS_INT times;
	// publishing the total into int_slot keeps the JIT from eliding the work.
	int sum = 0;
	for (int round = 0; round < ITERATIONS_INT; round++) {
		for (TIntIntIterator it = int_t_primitive_map.iterator(); it.hasNext();) {
			it.advance();
			sum += it.key();
		}
	}
	int_slot.set(sum);
}
@Override
public void testInt_Trove2PrimitiveHashMap() {
	// Same key-summing benchmark as the Trove 3 variant, but driven through
	// the legacy gnu.trove (Trove 2) iterator type.
	int sum = 0;
	for (int round = 0; round < ITERATIONS_INT; round++) {
		for (gnu.trove.TIntIntIterator it = int_t2_primitive_map.iterator(); it.hasNext();) {
			it.advance();
			sum += it.key();
		}
	}
	int_slot.set(sum);
}
/**
 * Groups near-duplicate documents. Each not-yet-claimed document pulls in
 * every other document whose Dice similarity exceeds the threshold, and the
 * resulting clusters are stored in dsMap.
 *
 * @param docs the documents to deduplicate
 * @throws Exception propagated from feature() / similarity()
 */
public void duplicate(ArrayList<String> docs) throws Exception {
	this.docs = docs;
	dsMap = new TreeSet<DocSim>();
	feature();
	similarity();
	System.out.println("去重复");
	boolean[] claimed = new boolean[docs.size()];
	for (int first = 0; first < docs.size(); first++) {
		if (claimed[first] || similarityMap[first] == null) {
			continue;
		}
		ArrayList<Integer> cluster = new ArrayList<Integer>();
		cluster.add(first);
		TIntIntIterator it = similarityMap[first].iterator();
		for (int remaining = similarityMap[first].size(); remaining-- > 0;) {
			it.advance();
			int second = it.key();
			// Dice coefficient: 2 * |shared features| / (|features1| + |features2|)
			double sim = ((double) (it.value() * 2)) / (featureLen[first] + featureLen[second]);
			if (sim > thres) {
				claimed[second] = true;
				cluster.add(second);
			}
		}
		dsMap.add(new DocSim(cluster));
	}
}
/**
 * Builds a histogram of frequencies: how many items share each frequency
 * value found in the given map.
 *
 * @param freqmap map from item to its frequency (values are the frequencies
 *            being counted; keys are ignored)
 * @return a sorted map from frequency to the number of items having that
 *         frequency
 */
public static TreeMap<Integer, Integer> countFrequency(TIntIntHashMap freqmap) {
	TreeMap<Integer, Integer> map = new TreeMap<Integer, Integer>();
	TIntIntIterator it = freqmap.iterator();
	while (it.hasNext()) {
		it.advance();
		int freq = it.value();
		// Single lookup instead of the containsKey/get/put triple: get()
		// returns null on first sight of a frequency.
		Integer seen = map.get(freq);
		map.put(freq, seen == null ? 1 : seen + 1);
	}
	return map;
}
/**
 * Merges another counter into this one, summing the counts of materials
 * present in both.
 *
 * @param other the counter whose entries are folded into this one
 */
public void addAll(MaterialCounter other) {
	TIntIntIterator entries = other.counts.iterator();
	while (entries.hasNext()) {
		entries.advance();
		// adjustOrPutValue adds the count when the key exists, inserts it otherwise.
		counts.adjustOrPutValue(entries.key(), entries.value(), entries.value());
	}
}
/**
 * Will compress an older renaming, by removing infrequent items. Contained
 * arrays (except closure) will refer new item IDs
 *
 * @param olderReverseRenaming
 *            reverseRenaming from the dataset that fed this Counter
 * @param items
 *            below this parameter will be renamed by decreasing frequency
 * @return the translation from the old renaming to the compressed one
 *         (gives -1 for removed items)
 */
public int[] compressSortRenaming(int[] olderReverseRenaming) {
	if (olderReverseRenaming == null) {
		olderReverseRenaming = this.reverseRenaming;
	}
	this.rebasedDistinctTransactionsCounts = new int[this.nbFrequents];
	this.rebasedSupportCounts = new int[this.nbFrequents];
	this.reverseRenaming = new int[this.nbFrequents];
	// first, compact
	// we will always have newItemID <= item
	// Two-ended compaction: items below maxCandidate fill the array from the
	// front, all others fill it from the back.
	int newItemIDBelowCandidate = 0;
	int newItemIDAboveCandidate = this.nbFrequents - 1;
	TIntIntIterator supportIterator = this.supportCounts.iterator();
	// after this loop we have
	// reverseRenaming: NewBase (index) -> PreviousDatasetBase (value)
	// supportCounts: NewBase (index) -> Support (value)
	// distinctTransactionCount: NewBase (index) -> Count (value)
	while (supportIterator.hasNext()) {
		supportIterator.advance();
		if (supportIterator.key() < this.maxCandidate) {
			this.reverseRenaming[newItemIDBelowCandidate] = supportIterator.key();
			this.rebasedSupportCounts[newItemIDBelowCandidate] = supportIterator.value();
			this.rebasedDistinctTransactionsCounts[newItemIDBelowCandidate] = this.distinctTransactionsCounts
					.get(supportIterator.key());
			newItemIDBelowCandidate++;
		} else {
			this.reverseRenaming[newItemIDAboveCandidate] = supportIterator.key();
			this.rebasedSupportCounts[newItemIDAboveCandidate] = supportIterator.value();
			this.rebasedDistinctTransactionsCounts[newItemIDAboveCandidate] = this.distinctTransactionsCounts
					.get(supportIterator.key());
			newItemIDAboveCandidate--;
		}
	}
	// The old per-item maps are fully transferred into the rebased arrays.
	this.supportCounts = null;
	this.distinctTransactionsCounts = null;
	this.maxCandidate = newItemIDBelowCandidate;
	this.maxFrequent = this.nbFrequents - 1;
	// now, sort up to the pivot
	this.quickSortOnSup(0, this.maxCandidate);
	int[] renaming = new int[Math.max(olderReverseRenaming.length, this.rebasingSize)];
	Arrays.fill(renaming, -1);
	// after this loop we have
	// reverseRenaming: NewBase (index) -> OriginalBase (value)
	// renaming: PreviousDatasetBase (index) -> NewBase (value)
	for (int i = 0; i <= this.maxFrequent; i++) {
		renaming[this.reverseRenaming[i]] = i;
		this.reverseRenaming[i] = olderReverseRenaming[this.reverseRenaming[i]];
	}
	this.compactedArrays = true;
	return renaming;
}
/** * start path extraction considering all the pairs main_item-items */ private void start(){ TIntIntHashMap paths = null; String item_pair_paths = ""; for(int j = 0; j < items.size(); j++){ ItemTree b = items.get(j); int b_id = b.getItemId(); paths = computePaths(main_item, b); if(paths.size() > 0){ item_pair_paths = main_item_id + "-" + b_id + "\t"; TIntIntIterator it = paths.iterator(); while(it.hasNext()){ it.advance(); item_pair_paths += it.key() + "=" + it.value() + ","; } item_pair_paths = item_pair_paths.substring(0, item_pair_paths.length()-1); // text file writing if(textWriter != null) textWriter.write(item_pair_paths); // binary file writing if(pathWriter != null) pathWriter.write(item_pair_paths); if(computeInversePaths){ item_pair_paths = b_id + "-" + main_item_id + "\t"; it = paths.iterator(); while(it.hasNext()){ it.advance(); item_pair_paths += reverse(it.key()) + "=" + it.value() + ","; } item_pair_paths = item_pair_paths.substring(0, item_pair_paths.length()-1); // text file writing if(textWriter != null) textWriter.write(item_pair_paths); // binary file writing if(pathWriter != null){ pathWriter.write(item_pair_paths); } } } } }
@Override
public TIntIntIterator iterator() {
	// Delegate straight to the backing primitive map's iterator.
	final TIntIntIterator delegate = intdata.iterator();
	return delegate;
}
public TIntIntIterator iterator() {
	// Expose the wrapped container's iterator directly.
	final TIntIntIterator delegate = container.iterator();
	return delegate;
}