/**
 * Unbind the channel context.
 *
 * @param channelContext the channel context
 */
public void unbind(ChannelContext<SessionContext, P, R> channelContext) {
    Lock lock = map.getLock().writeLock();
    DualHashBidiMap<String, ChannelContext<SessionContext, P, R>> m = map.getObj();
    try {
        lock.lock();
        m.removeValue(channelContext);
    } catch (Exception e) {
        throw e;
    } finally {
        lock.unlock();
    }
}
/**
 * Unbind by userid.
 * <p>
 * Created on 2016-11-17 14:43:28.
 *
 * @param userid the userid
 * @author tanyaowu
 */
public void unbind(String userid) {
    Lock lock = map.getLock().writeLock();
    DualHashBidiMap<String, ChannelContext<SessionContext, P, R>> m = map.getObj();
    try {
        lock.lock();
        m.remove(userid);
    } catch (Exception e) {
        throw e;
    } finally {
        lock.unlock();
    }
}
/**
 * Bind a userid to the channel context.
 * <p>
 * Created on 2016-11-17 14:25:46.
 *
 * @param userid         the userid
 * @param channelContext the channel context
 * @author tanyaowu
 */
public void bind(String userid, ChannelContext<SessionContext, P, R> channelContext) {
    String key = userid;
    Lock lock = map.getLock().writeLock();
    DualHashBidiMap<String, ChannelContext<SessionContext, P, R>> m = map.getObj();
    try {
        lock.lock();
        m.put(key, channelContext);
        channelContext.setUserid(userid);
    } catch (Exception e) {
        throw e;
    } finally {
        lock.unlock();
    }
}
/**
 * Finds the channel context bound to the given userid.
 *
 * @param userid the userid
 * @return the channel context
 */
public ChannelContext<SessionContext, P, R> find(String userid) {
    String key = userid;
    Lock lock = map.getLock().readLock();
    DualHashBidiMap<String, ChannelContext<SessionContext, P, R>> m = map.getObj();
    try {
        lock.lock();
        return (ChannelContext<SessionContext, P, R>) m.get(key);
    } catch (Exception e) {
        throw e;
    } finally {
        lock.unlock();
    }
}
/**
 * Removes the mapping for the given channel context.
 *
 * @param channelContext the channel context
 */
public void remove(ChannelContext<SessionContext, P, R> channelContext) {
    Lock lock = map.getLock().writeLock();
    DualHashBidiMap<String, ChannelContext<SessionContext, P, R>> m = map.getObj();
    try {
        lock.lock();
        m.removeValue(channelContext);
    } catch (Exception e) {
        throw e;
    } finally {
        lock.unlock();
    }
}
/**
 * Adds a mapping for the channel context.
 * <p>
 * Created on 2016-11-17 14:25:46.
 *
 * @param channelContext the channel context
 * @author tanyaowu
 */
public void put(ChannelContext<SessionContext, P, R> channelContext) {
    String key = getKey(channelContext);
    Lock lock = map.getLock().writeLock();
    DualHashBidiMap<String, ChannelContext<SessionContext, P, R>> m = map.getObj();
    try {
        lock.lock();
        m.put(key, channelContext);
    } catch (Exception e) {
        throw e;
    } finally {
        lock.unlock();
    }
}
public ChannelContext<SessionContext, P, R> find(String key) {
    Lock lock = map.getLock().readLock();
    DualHashBidiMap<String, ChannelContext<SessionContext, P, R>> m = map.getObj();
    try {
        lock.lock();
        return (ChannelContext<SessionContext, P, R>) m.get(key);
    } catch (Exception e) {
        throw e;
    } finally {
        lock.unlock();
    }
}
private static BidiMap<State, State> construct(TransitionSystem lts1, TransitionSystem lts2, boolean checkLabels) {
    // Check trivial case
    if (lts1.getNodes().size() != lts2.getNodes().size()) {
        return new DualHashBidiMap<>();
    }

    if (checkLabels) {
        boolean precond1 = checkPreconditions(lts1);
        boolean precond2 = checkPreconditions(lts2);
        if (precond1 != precond2)
            // Not isomorphic
            return new DualHashBidiMap<>();
        if (precond1 && precond2)
            // Both lts are totally reachable and deterministic. We can apply a special algorithm.
            return checkViaDepthSearch(lts1, lts2);
    }

    return new IsomorphismLogicComplex(lts1, lts2, checkLabels).getIsomorphism();
}
/**
 * The main method.
 * <p>
 * Created on 2016-11-17 13:12:56.
 *
 * @param args the arguments
 * @author tanyaowu
 */
public static void main(String[] args) {
    DualHashBidiMap<String, String> dualHashBidiMap = new DualHashBidiMap<>();
    dualHashBidiMap.put("111", "111111");
    // "111111" is already mapped from "111"; a BidiMap enforces a one-to-one mapping,
    // so this put removes the old "111" entry
    dualHashBidiMap.put("222", "111111");
    System.out.println(dualHashBidiMap.getKey("111111")); // prints 222
}
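For reference, a minimal standalone sketch of the one-to-one contract the demo above relies on, using only the plain Commons Collections 4 BidiMap API (the class name is just for illustration; expected output is noted in the comments):

import org.apache.commons.collections4.BidiMap;
import org.apache.commons.collections4.bidimap.DualHashBidiMap;

public class BidiMapContractDemo {
    public static void main(String[] args) {
        BidiMap<String, String> m = new DualHashBidiMap<>();
        m.put("111", "111111");
        m.put("222", "111111");                 // "111111" is already a value, so the "111" entry is dropped
        System.out.println(m.getKey("111111")); // prints 222
        System.out.println(m.size());           // prints 1
        m.removeValue("111111");                // removal by value, as used by the unbind/remove methods above
        System.out.println(m.isEmpty());        // prints true
    }
}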
public InteractionClassModelManager() {
    this.published = new HashMap<String, InteractionClassModel>();
    this.mapInstanceNameInteractionClassEntity = new HashMap<String, InteractionClassEntity>();
    this.subscribed = new HashMap<String, InteractionClassModel>();
    this.mapInteractionClassHandleClass = new DualHashBidiMap<InteractionClassHandle, Class>();
}
public WordLexicon(final int size, final int nnz, final Function<CharSequence, RIV> rivMaker,
        final RIVTopicHeirarchy topics, final DualHashBidiMap<String, RIV> lexicon) {
    super();
    this.size = size;
    this.nnz = nnz;
    this.topics = topics;
    this.lexicon = lexicon;
    permutations = Permutations.generate(size);
    this.rivMaker = rivMaker;
}
public WordLexicon(final int size, final int nnz, final RIVConstructor rivConstructor,
        final RIVTopicHeirarchy topics, final DualHashBidiMap<String, RIV> lexicon) {
    this(size, nnz, RIVs.generator(size, nnz, rivConstructor), topics, lexicon);
}
/**
 * Generates a map of substitution symbols for the given language and substitution array.
 *
 * @param language      language to generate the substitution for
 * @param substitutions array of substitution characters
 * @return bi-directional map with a 1:1 mapping between the two alphabets
 */
public static BidiMap<Character, Character> generateSubstitutionAlphabet(Language language, char[] substitutions) {
    char[] languageAlphabet = getAlphabet(language);
    if (!ArrayUtils.isSameLength(substitutions, languageAlphabet))
        throw new IllegalArgumentException("Substitution array must have the same length as the language's alphabet");

    BidiMap<Character, Character> substitutionAlphabet = new DualHashBidiMap<>();
    for (int i = 0; i < languageAlphabet.length; i++) {
        char key = languageAlphabet[i];
        char value = substitutions[i];
        substitutionAlphabet.put(key, value);
    }
    return substitutionAlphabet;
}
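A brief usage sketch for the method above. Language.ENGLISH and a 26-letter lower-case alphabet returned by getAlphabet are assumptions taken from the snippet's context, not confirmed API of the original project:

// Hypothetical usage; Language.ENGLISH and the 26-letter alphabet are assumptions.
char[] rot13 = "nopqrstuvwxyzabcdefghijklm".toCharArray();
BidiMap<Character, Character> cipher = generateSubstitutionAlphabet(Language.ENGLISH, rot13);
char encoded = cipher.get('a');     // 'n'
char decoded = cipher.getKey('n');  // 'a' -- reverse lookup via the BidiMap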
public ObjectClassModelManager() {
    this.published = new HashMap<String, ObjectClassModel>();
    this.mapInstanceNameObjectClassEntity = new DualHashBidiMap<String, ObjectClassEntity>();
    this.subscribed = new HashMap<String, ObjectClassModel>();
    this.mapHandleClassObjectClass = new HashMap<ObjectClassHandle, Class>();
    this.objectInstanceHandleObjectClassHandle = new HashMap<ObjectInstanceHandle, ObjectClassHandleEntity>();
}
public static void main(String[] args) {
    String[] englishWords = {"one", "two", "three", "ball", "snow"};
    String[] polishWords = {"jeden", "dwa", "trzy", "kula", "snieg"};

    // Create a BidiMap and fill it as an English-Polish dictionary
    BidiMap<String, String> biMap = new DualHashBidiMap<>();
    int i = 0;
    for (String englishWord : englishWords) {
        biMap.put(englishWord, polishWords[i]);
        i++;
    }

    // Print the whole dictionary
    System.out.println(biMap); // prints "{ball=kula, snow=snieg, one=jeden, two=dwa, three=trzy}" - in arbitrary order

    // Print the unique keys and values
    System.out.println(biMap.keySet()); // prints "[ball, snow, one, two, three]" - in arbitrary order
    System.out.println(biMap.values()); // prints "[kula, snieg, jeden, dwa, trzy]" - in arbitrary order

    // Look up translations in both directions
    System.out.println("one = " + biMap.get("one"));        // prints one = jeden
    System.out.println("two = " + biMap.get("two"));        // prints two = dwa
    System.out.println("kula = " + biMap.getKey("kula"));   // prints kula = ball
    System.out.println("snieg = " + biMap.getKey("snieg")); // prints snieg = snow
    System.out.println("empty = " + biMap.get("empty"));    // prints empty = null

    // Print the number of word pairs
    System.out.println(biMap.size()); // prints 5
}
public Map<Short, Short> getOffsetNodeIds() {
    DualHashBidiMap<Short, Short> map = new DualHashBidiMap<Short, Short>();
    for (Integer id : tree.getNodes()) {
        map.put(id.shortValue(), id == elemIpiNode ? prefixIpiNode : (short) (id + absolutePosOfRootInPrefixTree));
    }
    return map;
}
private void updateDependenciesUnderCoveredNode(ElementaryStringTree tree, DualHashBidiMap<Integer, Integer> coveredNodes,
        DualHashBidiMap<Short, Short> offsetNodeIdsOfShadowTree, int timestamp, DepNode anchorNode, DependencyArc arc,
        Iterator<DependencyArc> iterator, String[] words, String[] origPosTags)
{
    boolean removedArcInfinitiveMarker = false;
    if(tree.isRelation())
    {
        if(arc.isRelationIncomplete()) // avoid filling in an already complete relation entry
        {
            setRelation(arc, anchorNode, iterator, words, origPosTags, "V", true, false);
        }
        if(arc.isArgumentIncomplete()) // fill in the arg by re-using the argument of the integration point's child verified above
        {
            if(!fillArgumentFromChildNode(arc, coveredNodes, offsetNodeIdsOfShadowTree, tree, words, origPosTags, timestamp))
            {
                // as a last resort fill in the anchor (relation) of the verifying tree as an argument.
                // Apply infinitive marker heuristic, if necessary
                removedArcInfinitiveMarker = applyInfinitiveMarkerHeuristic(arc, anchorNode, iterator, words, origPosTags, "V", true, false);
                // setArgument(arc, anchorNode);
            }
        }
    }
    else if(arc.isArgumentIncomplete()) // avoid filling in an argument with a relation as well as replacing a complete entry
    {
        setArgument(arc, anchorNode, iterator, words, origPosTags, "V", true, false);
    }
    // possibly created a complete arc, so we can identify and disambiguate role labels discriminatively
    if(!removedArcInfinitiveMarker)
    {
        boolean keepArc = identifyArcAndDisambiguateRoles(model, arc, words, origPosTags);
        if(!keepArc)
        {
            removeArcSafe(arc, arc.getIntegrationPoint(), iterator);
        }
    }
}
private boolean addNpSubstBelowSInFringe(List<Fringe> fringes, Node prefixNode, boolean verification,
        DualHashBidiMap<Short, Short> offsetNodeIdsOfShadowTree)
{
    boolean updated = false;
    int i = 0;
    for(Fringe f : fringes)
        if(addNpSubstBelowSInFringe(f, i++, prefixNode, false, verification, offsetNodeIdsOfShadowTree))
            updated = true;
    return updated;
}
/**
 * Builds the set of available GUI languages from the AVAILABLE_LOCALES property.
 *
 * @return a bidirectional map of the available GUI language strings
 */
public BidiMap<String, String> getGUILanguageStrings() {
    final BidiMap<String, String> langs = new DualHashBidiMap<String, String>();
    final String[] langsFromProp = getProp(AVAILABLE_LOCALES).split(";");
    for (String st : langsFromProp) {
        final String[] parts = st.split("-");
        langs.put(parts[0], parts[1]);
    }
    return langs;
}
@Override
@SuppressWarnings(value = "unchecked")
public IterableMap<K, V> getMap() {
    final IterableMap<K, V> testMap = new DualHashBidiMap<K, V>();
    testMap.put((K) "A", (V) "a");
    testMap.put((K) "B", (V) "b");
    testMap.put((K) "C", (V) "c");
    return testMap;
}
private static BidiMap<State, State> checkViaDepthSearch(TransitionSystem lts1, TransitionSystem lts2) {
    BidiMap<State, State> result = new DualHashBidiMap<>();
    Set<String> alphabet = lts1.getAlphabet();
    if (!alphabet.equals(lts2.getAlphabet()))
        // Not isomorphic, there is an arc with a label not occurring in the other lts
        return result;

    Queue<Pair<State, State>> unhandled = new ArrayDeque<>();
    visit(result, unhandled, lts1.getInitialState(), lts2.getInitialState());

    while (!unhandled.isEmpty()) {
        InterrupterRegistry.throwIfInterruptRequestedForCurrentThread();

        Pair<State, State> pair = unhandled.remove();
        State state1 = pair.getFirst();
        State state2 = pair.getSecond();

        for (String label : alphabet) {
            State follow1 = follow(state1, label);
            State follow2 = follow(state2, label);
            if (!visit(result, unhandled, follow1, follow2))
                // Not isomorphic
                return new DualHashBidiMap<>();
        }
    }
    return result;
}
@Override
public void start(Stage primaryStage) {
    this.stage = primaryStage;
    primaryStage.setTitle(APPLICATION_NAME + " V" + VERSION + "." + REVISION);

    ApplicationSettings.initialize();
    ApplicationSettings.loadFromFile("settings/plp-tool.settings");

    EventRegistry.getGlobalRegistry().register(new ApplicationEventBusEventHandler());
    applicationThemeManager = new ApplicationThemeManager();

    this.assemblyDetails = new HashMap<>();
    this.openFileTabs = new DualHashBidiMap<>();
    this.openProjectsPanel = new TabPane();
    this.projectExplorer = createProjectTree();
    outlineView = createOutlineView();
    console = createConsole();
    console.println(">> Console Initialized.");

    openProjectsPanel.getSelectionModel().selectedItemProperty()
            .addListener(this::onTabActivation);

    ScrollPane scrollableProjectExplorer = new ScrollPane(projectExplorer);
    scrollableProjectExplorer.setVbarPolicy(ScrollBarPolicy.AS_NEEDED);
    scrollableProjectExplorer.setHbarPolicy(ScrollBarPolicy.AS_NEEDED);
    scrollableProjectExplorer.setFitToHeight(true);
    scrollableProjectExplorer.setFitToWidth(true);

    // Left side holds the project tree and outline view
    SplitPane leftSplitPane = new SplitPane();
    leftSplitPane.orientationProperty().set(Orientation.VERTICAL);
    leftSplitPane.getItems().addAll(scrollableProjectExplorer, outlineView);
    leftSplitPane.setDividerPositions(0.5, 1.0);
    leftSplitPane.setMinSize(0, 0);

    // Right side holds the source editor and the output console
    SplitPane rightSplitPane = new SplitPane();
    rightSplitPane.orientationProperty().set(Orientation.VERTICAL);
    rightSplitPane.getItems().addAll(Components.wrap(openProjectsPanel), Components.wrap(console));
    rightSplitPane.setDividerPositions(0.75, 1.0);
    rightSplitPane.setMinSize(0, 0);

    // Container for the whole view (everything under the toolbar)
    SplitPane explorerEditorSplitPane = new SplitPane();
    explorerEditorSplitPane.getItems().addAll(Components.wrap(leftSplitPane), Components.wrap(rightSplitPane));
    explorerEditorSplitPane.setDividerPositions(0.225, 1.0);
    explorerEditorSplitPane.setMinSize(0, 0);
    SplitPane.setResizableWithParent(leftSplitPane, Boolean.FALSE);

    //loadOpenProjects();

    Parent menuBar = createMenuBar();
    Parent toolbar = createToolbar();

    BorderPane mainPanel = new BorderPane();
    VBox topContainer = new VBox();
    topContainer.getChildren().add(menuBar);
    topContainer.getChildren().add(toolbar);
    mainPanel.setTop(topContainer);
    mainPanel.setCenter(explorerEditorSplitPane);

    int width = DEFAULT_WINDOW_WIDTH;
    int height = DEFAULT_WINDOW_HEIGHT;
    Scene scene = new Scene(Components.wrap(mainPanel), width, height);
    primaryStage.setScene(scene);

    String themeName = ApplicationSettings.getSetting(
            ApplicationSetting.APPLICATION_THEME).get();
    EventRegistry.getGlobalRegistry().post(new ThemeRequestEvent(themeName));

    primaryStage.show();
}
/**
 * @return the map
 */
public ObjWithLock<DualHashBidiMap<String, ChannelContext<SessionContext, P, R>>> getMap() {
    return map;
}
public WordLexicon(final int size, final int nnz, final RIVConstructor rivConstructor, final RIVTopicHeirarchy topics) {
    this(size, nnz, rivConstructor, topics, new DualHashBidiMap<>());
}
public WordLexicon clear() {
    return new WordLexicon(size, nnz, rivMaker, topics, new DualHashBidiMap<>());
}
/**
 * Grab argument information from the (potentially argument-complete) subtree rooted on the child node
 * that is attaching on the integration point of <code>arc</code>. We search through the dependencies
 * we have already encountered.
 * @param arc
 * @param coveredNodes
 * @param offsetNodeIdsOfShadowTree
 * @param tree
 * @param words
 * @param origPosTags
 * @param timestamp
 * @return true if the argument slot of <code>arc</code> was filled
 */
private boolean fillArgumentFromChildNode(DependencyArc arc, DualHashBidiMap<Integer, Integer> coveredNodes,
        DualHashBidiMap<Short, Short> offsetNodeIdsOfShadowTree, ElementaryStringTree tree,
        String[] words, String[] origPosTags, int timestamp)
{
    boolean filled = false;
    if(arc.getArgument() != null) // TODO Investigate
    {
        int idOfArgOnPrefix = arc.getArgument().getId();
        Short idOfArgOnShadow = offsetNodeIdsOfShadowTree.getKey((short)idOfArgOnPrefix);
        if(idOfArgOnShadow != null)
        {
            Integer idOfArgOnVerif = coveredNodes.get((int)idOfArgOnShadow);
            if(idOfArgOnVerif != null)
            {
                for(int childId : tree.getChildren(idOfArgOnVerif))
                {
                    Integer childIdOnShadow = coveredNodes.getKey(childId);
                    if(childIdOnShadow != null)
                    {
                        Collection<DependencyArc> childArcs = dependencies.getDependenciesByIntegPoint(new DepNode(
                                offsetNodeIdsOfShadowTree.get(childIdOnShadow.shortValue()), timestamp));
                        if(childArcs != null)
                        {
                            Iterator<DependencyArc> iterator = childArcs.iterator();
                            while(iterator.hasNext())
                            {
                                DependencyArc childArc = iterator.next();
                                if(!childArc.isArgumentIncomplete()) // make sure the dependency has argument information
                                {
                                    setArgument(arc, childArc.getArgument(), iterator, words, origPosTags, "V", true, false);
                                    filled = true;
                                }
                            }
                        }
                    }
                }
            }
        }
    }
    return filled;
}
private boolean addNpSubstBelowSInFringe(Fringe fringe, int futureFringePos, Node prefixNode, boolean openRightEmpty,
        boolean verification, DualHashBidiMap<Short, Short> offsetNodeIdsOfShadowTree) // [][S]:NP
{
    List<Node> openRight = fringe.getAdjNodesOpenRight();
    Node lastAdjNode = fringe.getLastAdjNode();
    Node elemSubst = fringe.getSubstNode();
    if(lastAdjNode == null || elemSubst == null)
        return false;
    if( (!openRightEmpty || openRight.isEmpty()) && lastAdjNode.getCategory().equals("S")
            && elemSubst.getCategory().equals("NP")) // NP in subject position ( S NP! ...) case, i.e., fringe=[][S]:NP
    {
        if(verification) // check whether we verify the first two nodes in the stack
        {
            Short firstEl = offsetNodeIdsOfShadowTree.get(elemSubst.getNodeId());
            Short secondEl = offsetNodeIdsOfShadowTree.get(lastAdjNode.getNodeId());
            if(firstEl == null || secondEl == null)
                return false;
            return dependencies.twoFirstElementsEquals(firstEl, secondEl);
        }
        else
        {
            // treat S label
            // copy-paste the identical Node on the prefix's fringe or future fringe
            int futureFringeListSize = this.futureFringe.getFringe().size();
            Fringe prefixFringe = futureFringePos == -1 || futureFringePos >= futureFringeListSize
                    ? this.fringe : this.futureFringe.getFringe().get(futureFringePos);
            if(prefixFringe.getLastAdjNode() != null && prefixFringe.getLastAdjNode().identical(fringe.getLastAdjNode()))
            {
                dependencies.pushNpHead(prefixFringe.getLastAdjNode());
            }
            else // as a last resort search for the identical node on the prefix fringe or future fringe
            {
                Node sNode = findIdenticalNodeInFringe(fringe.getLastAdjNode());
                if(sNode != null)
                    dependencies.pushNpHead(sNode);
                else
                {
                    dependencies.pushNpHead(fringe.getLastAdjNode());
                    // System.out.println("identical S not found in prefix tree");
                }
            }
            // treat NP label
            // in case the tree is ( root S - NP! ...) and the prefixNode is also an NP, i.e.,
            // adjoins to the leftmost NP! push the prefixNode in the stack (hopefully containing complete delta info)
            Node npHead;
            Node prefixSubstNode = prefixFringe.getSubstNode();
            if(openRightEmpty && prefixNode.getCategory().equals("NP"))
            {
                npHead = prefixNode;
            }
            // copy-paste the identical Node on the prefix's fringe or future fringe
            else if(prefixSubstNode != null && prefixSubstNode.identical(elemSubst))
            {
                npHead = prefixSubstNode;
            }
            else // as a last resort search for the identical node on the prefix fringe or future fringe
            {
                // TODO: check if it applies
                Node npNode = findIdenticalSubstNodeInFringe(elemSubst);
                if(npNode != null)
                    npHead = npNode;
                else
                {
                    npHead = elemSubst;
                    // System.out.println("identical NP not found in prefix tree");
                }
            }
            dependencies.pushNpHead(npHead);
        }
        return true;
    }
    return false;
}
private boolean addNpSubstBelowSInFringe(Fringe fringe, Node prefixNode, boolean verification,
        DualHashBidiMap<Short, Short> offsetNodeIdsOfShadowTree)
{
    return addNpSubstBelowSInFringe(fringe, -1, prefixNode, true, verification, offsetNodeIdsOfShadowTree);
}
/**
 * Handles Lists.
 * <p/>
 * {@inheritDoc}
 */
@Override
public Object fromJava(Object obj) throws IllegalAccessException, InvocationTargetException, NoSuchMethodException, JSONException {
    BidiMap<String, String> codec = CODEC.get();
    boolean initialCall = false;
    if (codec == null) {
        initialCall = true;
        codec = new DualHashBidiMap<>();
        CODEC.set(codec);
    }
    try {
        Object returnValue;
        if (obj instanceof List<?>) {
            List<?> list = (List<?>) obj;
            JSONObject jsonObject = new JSONObject();
            jsonObject.put(encode("class"), encode("qx.data.Array"));
            jsonObject.put(encode("array"), fromJava(list.toArray()));
            returnValue = jsonObject;
        } else if (obj instanceof BigDecimal) {
            returnValue = super.fromJava(((BigDecimal) obj).doubleValue());
        } else if (obj instanceof BigInteger) {
            returnValue = super.fromJava(((BigInteger) obj).longValue());
        } else if (obj instanceof Double) {
            if (((Double) obj).isNaN() || ((Double) obj).isInfinite()) {
                returnValue = null;
            } else {
                returnValue = super.fromJava(((Double) obj).doubleValue());
            }
        } else {
            returnValue = super.fromJava(obj);
        }
        if (initialCall) {
            JSONObject wrapper = new JSONObject();
            wrapper.put("codec", new JSONObject(codec));
            wrapper.put("payload", returnValue);
            returnValue = wrapper;
        }
        return returnValue;
    } finally {
        if (initialCall) {
            CODEC.remove();
        }
    }
}
@Override
public MapIterator<K, V> makeEmptyIterator() {
    return org.apache.commons.collections4.iterators.UnmodifiableMapIterator
            .unmodifiableMapIterator(new DualHashBidiMap<K, V>().mapIterator());
}