protected static final void writeMetaData(File resultFile, THashMap<String, String> cmdLine) {
    StringBuilder metaDataLineBuilder = new StringBuilder();
    for (String optionKey : cmdLine.keySet()) {
        // Options stored without a value are flags; record them as "true".
        String optionValue = cmdLine.get(optionKey) == null ? "true" : cmdLine.get(optionKey);
        metaDataLineBuilder.append(String.format("# %s :\t%s\n", optionKey, optionValue));
        System.out.print(String.format("# %s :\t%s\n", optionKey, optionValue));
    }
    String header = "#Filename\t#Rows\t#Columns\tTime\t#Deps\t#<2Deps\t#<3Deps\t#<4Deps\t#<5Deps\t#<6Deps\t#>5Deps\t#Partitions\n";
    metaDataLineBuilder.append(header);
    System.out.println(header);
    // try-with-resources closes the writer even if write() fails.
    try (BufferedWriter resultFileWriter = new BufferedWriter(new FileWriter(resultFile))) {
        resultFileWriter.write(metaDataLineBuilder.toString());
    } catch (IOException e) {
        System.out.println("Couldn't write meta data.");
    }
}
@Test
public void combinedKeyWithTHashMapIsScalarized() throws Exception {
    final THashMap<StringKey, String> troveMap = new THashMap<>( MAP );
    assertThat( new Scenario() {
        @Override
        public long run() {
            final StringKey combinedKey = generateKey( SUCCESSFUL_LOOKUPS_PROBABILITY );
            final String value = troveMap.get( combinedKey );
            if( value == null ) {
                return combinedKey.item1.length();
            } else {
                return value.length();
            }
        }
    }, finallyAllocatesNothing() );
}
public void testTHashMap() {
    TMap<String, String> map = new THashMap<String, String>();

    // Add 5, remove the first four, repeat
    String[] to_remove = new String[ 4 ];
    int batch_index = 0;
    for( String s : Constants.STRING_OBJECTS ) {
        if ( batch_index < 4 ) {
            to_remove[ batch_index ] = s;
        }
        map.put( s, s );

        batch_index++;
        if ( batch_index == 5 ) {
            for( String s_remove : to_remove ) {
                map.remove( s_remove );
            }
            batch_index = 0;
        }
    }
}
public void testTrove2HashMap() {
    gnu.trove.THashMap<String, String> map = new gnu.trove.THashMap<String, String>();

    // Add 5, remove the first four, repeat
    String[] to_remove = new String[ 4 ];
    int batch_index = 0;
    for( String s : Constants.STRING_OBJECTS ) {
        if ( batch_index < 4 ) {
            to_remove[ batch_index ] = s;
        }
        map.put( s, s );

        batch_index++;
        if ( batch_index == 5 ) {
            for( String s_remove : to_remove ) {
                map.remove( s_remove );
            }
            batch_index = 0;
        }
    }
}
/**
 * Constructs a container for states associated with a given {@code period}.
 * Do not use {@code ConcurrentHashMap} in conjunction with forward recursion.
 *
 * @param period the period associated with this container.
 * @param hash the type of hash used to store the state space
 */
public StateSpace(int period, HashType hash){
    this.period = period;
    switch(hash){
    case HASHTABLE:
        this.states = new Hashtable<SD,State>();
        break;
    case CONCURRENT_HASHMAP:
        this.states = Collections.synchronizedMap(new ConcurrentHashMap<SD,State>());
        break;
    case THASHMAP:
        this.states = Collections.synchronizedMap(new THashMap<SD,State>());
        break;
    case MAPDB_MEMORY:
        this.states = new MapDBHashTable<SD,State>("states", Storage.MEMORY);
        break;
    case MAPDB_DISK:
        this.states = new MapDBHashTable<SD,State>("states", Storage.DISK);
        break;
    default:
        throw new NullPointerException("HashType not available");
    }
}
/**
 * Constructs a container for states associated with a given {@code period}.
 * Do not use {@code ConcurrentHashMap} in conjunction with forward recursion.
 *
 * @param period the period associated with this container.
 * @param hash the type of hash used to store the state space
 * @param stateSpaceSizeLowerBound a lower bound for the SDP state space size, used to initialise the internal hash maps
 * @param loadFactor the internal hash maps load factor
 */
public StateSpace(int period, HashType hash, int stateSpaceSizeLowerBound, float loadFactor){
    this.period = period;
    switch(hash){
    case HASHTABLE:
        this.states = new Hashtable<SD,State>(stateSpaceSizeLowerBound, loadFactor);
        break;
    case CONCURRENT_HASHMAP:
        this.states = Collections.synchronizedMap(new ConcurrentHashMap<SD,State>(stateSpaceSizeLowerBound, loadFactor));
        break;
    case THASHMAP:
        this.states = Collections.synchronizedMap(new THashMap<SD,State>(stateSpaceSizeLowerBound, loadFactor));
        break;
    case MAPDB_MEMORY:
        this.states = new MapDBHashTable<SD,State>("states", Storage.MEMORY);
        break;
    case MAPDB_DISK:
        this.states = new MapDBHashTable<SD,State>("states", Storage.DISK);
        break;
    default:
        throw new NullPointerException("HashType not available");
    }
}
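// A usage sketch, not from the library: the pre-sized constructor above avoids rehashing
// the backing THashMap as states are inserted. It assumes StateSpace is generic in a
// state-descriptor type, as the SD type parameter in the field declarations suggests;
// the MyStateDescriptor name and the sizing values below are hypothetical.
StateSpace<MyStateDescriptor> defaults =
        new StateSpace<>(0, HashType.THASHMAP);
StateSpace<MyStateDescriptor> presized =
        new StateSpace<>(0, HashType.THASHMAP, 1_000_000, 0.75f);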
/**
 * Go through the options and sort out the long and the short options
 * into their own hash maps.
 *
 * @since 64bitlabsutils 1.07.00
 */
private void processOptions(){
    longOptions = new THashMap<>();
    shortOptions = new THashMap<>();
    for (CmdLnOption option: options){
        option.setImmutable();
        for (String name: option.getLongNames()){
            if (longOptions.containsKey(name)){
                throw new IllegalArgumentException("More than one long option has the name: '" + name + "'");
            }
            longOptions.put(name, option);
        }
        for (Character c: option.getShortNames()){
            if (shortOptions.containsKey(c)){
                throw new IllegalArgumentException("More than one short option has the character: '" + c + "'");
            }
            shortOptions.put(c, option);
        }
    }
}
public THashMap<String, String> read(long n){
    THashMap<String,String> res = new THashMap<String,String>();
    long count = 0;
    for(String key : file_index.keySet()){
        if(count < n){
            String tmp = read(key);
            res.put(key, tmp);
            count++;
        } else {
            break;
        }
    }
    return res;
}
private THashMap<String, String> readFileIndex(){
    THashMap<String,String> file_index = new THashMap<String,String>();
    try{
        String line = null;
        while((line = file_index_reader.readLine()) != null){
            String[] vals = line.split("\t");
            file_index.put(vals[0], vals[1] + ":" + vals[2]);
            index_size++;
        }
        return file_index;
    } catch(Exception e){
        e.printStackTrace();
    }
    return null;
}
public static void loadFileIndex(String file, THashMap<String, String> items_pair_value){
    // try-with-resources ensures the reader is closed even when an exception is thrown
    try(BufferedReader br = new BufferedReader(new FileReader(file))){
        String line = null;
        while((line = br.readLine()) != null){
            String[] vals = line.split("\t");
            items_pair_value.put(vals[0], vals[1] + ":" + vals[2]);
        }
    } catch(Exception e){
        e.printStackTrace();
    }
}
/**
 * Index all current annotations of this document. Creates an internal cache
 * for mappings from bioentity-id to set of annotations.
 *
 * @see #getGeneAnnotations(String)
 */
public void index() {
    annotationMap = new THashMap<String, List<GeneAnnotation>>();
    for (GeneAnnotation a : getGeneAnnotations()) {
        String eid = a.getBioentity();
        List<GeneAnnotation> entities = annotationMap.get(eid);
        if (entities == null) {
            // First annotation for this bioentity: store a cheap immutable singleton.
            annotationMap.put(eid, Collections.singletonList(a));
        } else if (entities.size() == 1) {
            // Second annotation: upgrade the singleton to a growable list.
            List<GeneAnnotation> longEntities = new ArrayList<GeneAnnotation>();
            longEntities.add(entities.get(0));
            longEntities.add(a);
            annotationMap.put(eid, longEntities);
        } else {
            entities.add(a);
        }
    }
}
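// A self-contained sketch of the singleton-first idiom used by index() above, with plain
// String values: keys holding exactly one value never pay for an ArrayList. The import
// assumes Trove 3 (gnu.trove.map.hash.THashMap); Trove 2 uses gnu.trove.THashMap.
import gnu.trove.map.hash.THashMap;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;

public class SingletonFirstMultimap {
    private final Map<String, List<String>> byKey = new THashMap<>();

    public void put(String key, String value) {
        List<String> values = byKey.get(key);
        if (values == null) {
            byKey.put(key, Collections.singletonList(value)); // first value: immutable singleton
        } else if (values.size() == 1) {
            List<String> grown = new ArrayList<>(values);     // second value: upgrade to mutable list
            grown.add(value);
            byKey.put(key, grown);
        } else {
            values.add(value);                                // already mutable
        }
    }
}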
protected boolean beginExecution(final IScope scope) throws GamaRuntimeException {
    final IAgent agent = scope.getAgent();
    if (scope.interrupted()) { return false; }
    final Boolean enter = (Boolean) agent.getAttribute(ENTER);
    Map<String, Object> memory = (Map) agent.getAttribute(STATE_MEMORY);
    if (enter || memory == null) {
        memory = new THashMap<>();
        agent.setAttribute(STATE_MEMORY, memory);
    } else {
        for (final Map.Entry<String, Object> entry : memory.entrySet()) {
            scope.addVarWithValue(entry.getKey(), entry.getValue());
        }
    }
    if (enter) {
        if (enterActions != null) {
            enterActions.executeOn(scope);
        }
        if (agent.dead()) { return false; }
        agent.setAttribute(ENTER, false);
    }
    return true;
}
static void initResources() {
    eType = GamlPackage.eINSTANCE.getTypeDefinition();
    eVar = GamlPackage.eINSTANCE.getVarDefinition();
    eSkill = GamlPackage.eINSTANCE.getSkillFakeDefinition();
    eAction = GamlPackage.eINSTANCE.getActionDefinition();
    eUnit = GamlPackage.eINSTANCE.getUnitFakeDefinition();
    eEquation = GamlPackage.eINSTANCE.getEquationDefinition();
    resources = new THashMap<>();
    resources.put(eType, createResource("types.xmi"));
    resources.put(eVar, createResource("vars.xmi"));
    resources.put(eSkill, createResource("skills.xmi"));
    resources.put(eUnit, createResource("units.xmi"));
    resources.put(eAction, createResource("actions.xmi"));
    resources.put(eEquation, createResource("equations.xmi"));
    descriptions = new THashMap<>();
    descriptions.put(eVar, new THashMap<>());
    descriptions.put(eType, new THashMap<>());
    descriptions.put(eSkill, new THashMap<>());
    descriptions.put(eUnit, new THashMap<>());
    descriptions.put(eAction, new THashMap<>());
    descriptions.put(eEquation, new THashMap<>());
    allNames = new THashSet<>();
}
/**
 * Creates and registers an extension instance.
 *
 * @param extensionClass
 *          the extension class.
 * @param proxy
 *          the proxy to register the extension on.
 * @return the component extension.
 */
protected synchronized IComponentExtension<? extends IComponent> getExtensionInstance(
        Class<IComponentExtension<IComponent>> extensionClass, IComponent proxy) {
    IComponentExtension<IComponent> extension;
    if (componentExtensions == null) {
        componentExtensions = new THashMap<>(1, 1.0f);
        extension = null;
    } else {
        extension = componentExtensions.get(extensionClass);
    }
    if (extension == null) {
        extension = extensionFactory.createComponentExtension(extensionClass,
                componentDescriptor.getComponentContract(), proxy);
        componentExtensions.put(extensionClass, extension);
        configureExtension(extension);
    }
    return extension;
}
public EdgesIterator (ACRF.UnrolledGraph acrf, Assignment observed) {
    this.graph = acrf;
    this.observed = observed;
    cliquesByEdge = new THashMap();
    for (Iterator it = acrf.unrolledVarSetIterator (); it.hasNext();) {
        ACRF.UnrolledVarSet clique = (ACRF.UnrolledVarSet) it.next ();
        for (int v1idx = 0; v1idx < clique.size(); v1idx++) {
            Variable v1 = clique.get(v1idx);
            List adjlist = graph.allFactorsContaining (v1);
            for (Iterator factorIt = adjlist.iterator(); factorIt.hasNext();) {
                Factor factor = (Factor) factorIt.next ();
                if (!cliquesByEdge.containsKey (factor)) {
                    cliquesByEdge.put (factor, new ArrayList());
                }
                List l = (List) cliquesByEdge.get (factor);
                if (!l.contains (clique)) {
                    l.add (clique);
                }
            }
        }
    }
    cursor = cliquesByEdge.keySet().iterator ();
}
private void setupLabel2Var () {
    idx2var = new Variable [lblseq.size ()][];
    var2label = new THashMap();
    for (int t = 0; t < lblseq.size (); t++) {
        Labels lbls = lblseq.getLabels (t);
        idx2var[t] = new Variable [lbls.size ()];
        for (int j = 0; j < lbls.size (); j++) {
            Label lbl = lbls.get (j);
            Variable var = new Variable (lbl.getLabelAlphabet ());
            var.setLabel ("I" + id + "_VAR[f=" + j + "][tm=" + t + "]");
            idx2var[t][j] = var;
            var2label.put (var, lbl);
        }
    }
}
public Record (String name, LabeledSpans spans) {
    this.name = name;
    fieldMap = new THashMap ();
    for (int i = 0; i < spans.size(); i++) {
        LabeledSpan span = spans.getLabeledSpan (i);
        if (!span.isBackground()) {
            Label tag = span.getLabel ();
            Field field = (Field) fieldMap.get (tag);
            if (field == null) {
                field = new Field (span);
                fieldMap.put (tag, field);
            } else {
                field.addFiller (span);
            }
        }
    }
}
@SuppressWarnings( "unchecked" ) public GenericContextResolver(Iterable<ContextResolver> resolvers) { resolverMapping = new THashMap<Class, Collection<ContextResolver>>(); for (ContextResolver resolver : resolvers) { Class streamClass = resolver.getContextOwnerClass(); Collection<ContextResolver> list = resolverMapping.get(streamClass); if (list == null) { resolverMapping.put(streamClass, Collections.singleton(resolver)); } else { if (list.size() == 1) { list = new ArrayList<ContextResolver>(list); resolverMapping.put(streamClass, list); } list.add(resolver); } } }
@Override
public Map<String, List<CacheInfo>> getCachesByTag() {
    Map<String, List<CacheInfo>> res = new THashMap<String, List<CacheInfo>>();
    for (CacheInfo cacheInfo : getCaches()) {
        String[] tags = cacheInfo.getTags();
        if (tags != null) {
            for (String tag : tags) {
                List<CacheInfo> list = res.get(tag);
                if (list == null) {
                    list = new ArrayList<CacheInfo>();
                    res.put(tag, list);
                }
                list.add(cacheInfo);
            }
        }
    }
    return res;
}
private static void readDataSetFile(List<DataSetFileEntry> _data) {
    features_frequencies_per_class = new THashMap();
    records_count = _data.size();
    for (DataSetFileEntry line_entry : _data) {
        classes_frequencies.increment(line_entry.class_name, 1);
        for (String feature_name : line_entry.features.keySet()) {
            // The value of the feature is either 1 or nothing.
            // FIXME: this part I am not sure about
            int feature_val = line_entry.features.get(feature_name).intValue();
            CustomStringIntHashMap feature_map;
            if (features_frequencies_per_class.containsKey(feature_name)) {
                feature_map = features_frequencies_per_class.get(feature_name);
            } else {
                feature_map = new CustomStringIntHashMap();
                features_frequencies_per_class.put(feature_name, feature_map);
            }
            feature_map.increment(line_entry.class_name, feature_val);
            features_frequencies.increment(feature_name, feature_val);
        }
    }
    all_features_count = features_frequencies.size();
    all_classes_count = classes_frequencies.size();
}
public static Map<String, InputStream> listStreams(ZipInputStream jar, String folder, Consumer filter) throws IOException {
    THashMap<String, InputStream> output = new THashMap<String, InputStream>();
    int offset = folder.length() + 1;
    ZipEntry entry;
    while ((entry = jar.getNextEntry()) != null) {
        String name = entry.getName();
        // Skip directory entries and names too short to lie inside the folder prefix.
        if (name.length() <= offset || name.endsWith("/")) {
            continue;
        }
        filter.consume(name, jar);
    }
    return output;
}
private Map<String, Map<String, String>> serialize(Set<QuestInstance> set) {
    Map<String, Map<String, String>> result = new THashMap<>();
    for (QuestInstance questInstance : set) {
        String quester = questInstance.getQuester();
        Map<String, String> map = result.get(quester);
        if (map == null) {
            map = new THashMap<>();
            result.put(quester, map);
        }
        map.put(questInstance.getInfo().getName(), questInstance.serializeProgression());
    }
    return result;
}
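// The null-check-then-put pattern in serialize() (and in getCachesByTag() above) predates
// Java 8. Since THashMap implements java.util.Map, computeIfAbsent collapses the lookup;
// a sketch of the equivalent loop, not the project's code:
Map<String, Map<String, String>> result = new THashMap<>();
for (QuestInstance questInstance : set) {
    result.computeIfAbsent(questInstance.getQuester(), q -> new THashMap<>())
          .put(questInstance.getInfo().getName(), questInstance.serializeProgression());
}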
@Override
public Map<String, Quest> loadQuests(File directory) {
    if (directory == null || !directory.isDirectory()) {
        return null;
    }
    Yaml yaml = new Yaml();
    Map<String, Quest> result = new THashMap<>();
    // iterate through all files in the directory which end with .yml
    for (File file : directory.listFiles(fl -> checkExtension(fl, "yml"))) {
        // try-with-resources closes the stream once the quest is loaded
        try (FileInputStream in = new FileInputStream(file)) {
            Quest loaded = loadQuest(yaml, in);
            if (loaded != null) {
                result.put(loaded.getName(), loaded);
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    return result;
}
@Override
public Map<String, Quest> loadQuests(File directory) {
    if (directory == null || !directory.isDirectory()) {
        return null;
    }
    Map<String, Quest> result = new THashMap<>();
    // iterate through all files in the directory which end with .js
    for (File file : directory.listFiles(fl -> checkExtension(fl, "js"))) {
        // try-with-resources closes the reader once the quest is loaded
        try (FileReader reader = new FileReader(file)) {
            Quest loaded = loadQuest(reader);
            if (loaded != null) {
                result.put(loaded.getName(), loaded);
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    return result;
}
public THashMap<String, String> parse(String[] input) {
    THashMap<String, String> result = new THashMap<>();
    try {
        CommandLine cmdLine = this.parse(this.getOptions(), input);
        for (Option option : cmdLine.getOptions()) {
            result.put(option.getOpt(), option.getValue());
        }
    } catch (ParseException e) {
        e.printStackTrace();
    }
    return result;
}
@SuppressWarnings({"MismatchedQueryAndUpdateOfCollection"}) public void testTPHashMapConstructors() { int cap = 20; THashMap cap_and_factor = new THashMap( cap, 0.75f ); assertTrue( "capacity not sufficient: " + cap + ", " + cap_and_factor.capacity(), cap <= cap_and_factor.capacity() ); assertEquals( 0.75f, cap_and_factor._loadFactor ); }
/** Use valueOf() instead. */
private Tags(Key tags) {
    tags_ = unmodifiableMap(tags.stream()
            .collect(Collectors.toMap(
                    Map.Entry::getKey,
                    Map.Entry::getValue,
                    (x, y) -> { throw new IllegalStateException("duplicate metric name"); },
                    () -> new THashMap<String, MetricValue>(tags.size(), LOAD_FACTOR))));
    hashCode_ = Objects.hashCode(this.tags_);
}
private static Map<SimpleGroupPath, Map<GroupName, SegmentReader<RTFGroupTable>>> outerMap(tables xdrTables, DictionaryDelta dictionary, SegmentReader<DictionaryDelta> dictionarySegment, SegmentReader.Factory<XdrAble> segmentFactory) {
    return Arrays.stream(xdrTables.value)
            .map(tg -> {
                final SimpleGroupPath path = SimpleGroupPath.valueOf(dictionary.getPath(tg.group_ref));
                final Map<GroupName, SegmentReader<RTFGroupTable>> groups =
                        unmodifiableMap(innerMap(path, tg, dictionary, dictionarySegment, segmentFactory));
                return SimpleMapEntry.create(path, groups);
            })
            .collect(Collectors.toMap(
                    Map.Entry::getKey,
                    Map.Entry::getValue,
                    (a, b) -> { throw new DecodingException("duplicate group reference"); },
                    () -> new THashMap<>(1, 1)));
}
private static Map<GroupName, SegmentReader<RTFGroupTable>> innerMap(SimpleGroupPath path, tables_group tg, DictionaryDelta dictionary, SegmentReader<DictionaryDelta> dictionarySegment, SegmentReader.Factory<XdrAble> segmentFactory) {
    return Arrays.stream(tg.tag_tbl)
            .collect(Collectors.toMap(
                    tt -> GroupName.valueOf(path, dictionary.getTags(tt.tag_ref)),
                    tt -> segmentFromFilePos(FromXdr.filePos(tt.pos), dictionarySegment, segmentFactory),
                    (a, b) -> { throw new DecodingException("duplicate tag reference"); },
                    () -> new THashMap<>(1, 1)));
}
private static Map<MetricName, SegmentReader<RTFMetricTable>> metricsMap(tables_metric[] tmArray, DictionaryDelta dictionary, SegmentReader.Factory<XdrAble> segmentFactory) {
    return Arrays.stream(tmArray)
            .collect(Collectors.toMap(
                    tm -> MetricName.valueOf(dictionary.getPath(tm.metric_ref)),
                    tm -> segmentFactory.get(metric_table::new, FromXdr.filePos(tm.pos))
                            .map(mt -> new RTFMetricTable(mt, dictionary))
                            .peek(RTFMetricTable::validate)
                            .cache(),
                    (a, b) -> { throw new DecodingException("duplicate metric reference"); },
                    () -> new THashMap<>(1, 1)));
}
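// The three collectors above share one pattern: the four-argument Collectors.toMap
// overload takes a merge function (used here to fail fast on duplicate keys) and a map
// supplier (used to substitute THashMap for the default HashMap). A self-contained
// sketch of the pattern outside the XDR context; the import assumes Trove 3.
import gnu.trove.map.hash.THashMap;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class ToTroveMap {
    public static void main(String[] args) {
        Map<String, Integer> lengths = Stream.of("one", "two", "three")
                .collect(Collectors.toMap(
                        word -> word,               // key function
                        String::length,             // value function
                        (a, b) -> { throw new IllegalStateException("duplicate key"); },
                        THashMap::new));            // supply a Trove map instead of HashMap
        System.out.println(lengths);                // prints the three mappings (iteration order unspecified)
    }
}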
/**
 * Creates a new value repository. Do not use {@code ConcurrentHashMap} in
 * conjunction with forward recursion.
 *
 * @param immediateValueFunction the immediate value of a transition from {@code initialState}
 *        to {@code finalState} under a chosen {@code action}.
 * @param discountFactor the value function discount factor
 * @param hash the type of hash used to store the state space
 */
public ValueRepository(ImmediateValueFunction<State, Action, Double> immediateValueFunction, double discountFactor, HashType hash){
    this.setImmediateValue(immediateValueFunction);
    this.discountFactor = discountFactor;
    switch(hash){
    case HASHTABLE:
        this.valueHashTable = new Hashtable<StateAction,Double>();
        this.optimalValueHashTable = new Hashtable<State,Double>();
        this.optimalActionHashTable = new Hashtable<State,Action>();
        break;
    case CONCURRENT_HASHMAP:
        this.valueHashTable = Collections.synchronizedMap(new ConcurrentHashMap<StateAction,Double>());
        this.optimalValueHashTable = Collections.synchronizedMap(new ConcurrentHashMap<State,Double>());
        this.optimalActionHashTable = Collections.synchronizedMap(new ConcurrentHashMap<State,Action>());
        break;
    case THASHMAP:
        this.valueHashTable = Collections.synchronizedMap(new THashMap<StateAction,Double>());
        this.optimalValueHashTable = Collections.synchronizedMap(new THashMap<State,Double>());
        this.optimalActionHashTable = Collections.synchronizedMap(new THashMap<State,Action>());
        break;
    case MAPDB_MEMORY:
        this.valueHashTable = new MapDBHashTable<StateAction,Double>("valueHashTable", Storage.MEMORY);
        this.optimalValueHashTable = new MapDBHashTable<State,Double>("optimalValueHashTable", Storage.MEMORY);
        this.optimalActionHashTable = new MapDBHashTable<State,Action>("optimalActionHashTable", Storage.MEMORY);
        break;
    case MAPDB_DISK:
        this.valueHashTable = new MapDBHashTable<StateAction,Double>("valueHashTable", Storage.DISK);
        this.optimalValueHashTable = new MapDBHashTable<State,Double>("optimalValueHashTable", Storage.DISK);
        this.optimalActionHashTable = new MapDBHashTable<State,Action>("optimalActionHashTable", Storage.DISK);
        break;
    default:
        throw new NullPointerException("HashType not available");
    }
}
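// One detail worth flagging in the constructors above: THashMap is not thread-safe, so
// the THASHMAP branches must wrap it in Collections.synchronizedMap, whereas
// ConcurrentHashMap already supports safe concurrent access and the extra wrapper only
// adds monitor contention. A minimal sketch of the distinction (illustrative names;
// the import path assumes Trove 3):
import gnu.trove.map.hash.THashMap;
import java.util.Collections;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class SharedMaps {
    // Required: THashMap has no internal synchronization.
    static final Map<String, Double> troveShared =
            Collections.synchronizedMap(new THashMap<String, Double>());

    // Sufficient on its own: ConcurrentHashMap is already thread-safe.
    static final Map<String, Double> concurrentShared =
            new ConcurrentHashMap<String, Double>();
}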