Java class com.google.common.collect.LinkedHashMultiset example source code

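LinkedHashMultiset is the Multiset implementation whose iteration order follows the order in which distinct elements were first added. The snippets below are collected from the listed open-source projects. As a quick orientation before those examples, here is a minimal, self-contained sketch of the core API (create, add, count, elementSet, size); it is not taken from any of the projects below.

import com.google.common.collect.LinkedHashMultiset;
import com.google.common.collect.Multiset;

public class LinkedHashMultisetBasics {
    public static void main(String[] args) {
        // Elements iterate in the order of their first insertion.
        Multiset<String> tags = LinkedHashMultiset.create();
        tags.add("red");
        tags.add("blue");
        tags.add("red");        // second occurrence, count becomes 2
        tags.add("green", 3);   // add three occurrences at once

        System.out.println(tags);                 // [red x 2, blue, green x 3]
        System.out.println(tags.count("red"));    // 2
        System.out.println(tags.count("absent")); // 0 for elements not present
        System.out.println(tags.elementSet());    // [red, blue, green]
        System.out.println(tags.size());          // 6 (total number of occurrences)
    }
}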
Project: businessworks    File: WeakKeySet.java
public void add(Key<?> key, State state, Object source) {
  if (backingMap == null) {
    backingMap = Maps.newHashMap();
  }
  // if it's an instanceof Class, it was a JIT binding, which we don't
  // want to retain.
  if (source instanceof Class || source == SourceProvider.UNKNOWN_SOURCE) {
    source = null;
  }
  Multiset<Object> sources = backingMap.get(key);
  if (sources == null) {
    sources = LinkedHashMultiset.create();
    backingMap.put(key, sources);
  }
  Object convertedSource = Errors.convert(source);
  sources.add(convertedSource);

  // Avoid all the extra work if we can.
  if (state.parent() != State.NONE) {
    Set<KeyAndSource> keyAndSources = evictionCache.getIfPresent(state);
    if (keyAndSources == null) {
      evictionCache.put(state, keyAndSources = Sets.newHashSet());
    }
    keyAndSources.add(new KeyAndSource(key, convertedSource));
  }
}
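The method above lazily creates one LinkedHashMultiset per key inside a plain HashMap, so that each binding source is counted while the order in which sources were first seen is preserved. A minimal sketch of that per-key counting idiom, with String keys standing in for Guice's Key<?> and State types (the names here are hypothetical, not Guice API):

import com.google.common.collect.LinkedHashMultiset;
import com.google.common.collect.Multiset;
import java.util.HashMap;
import java.util.Map;

public class PerKeySourceCounts {
    private final Map<String, Multiset<Object>> backingMap = new HashMap<>();

    public void add(String key, Object source) {
        // computeIfAbsent replaces the explicit null check used in the snippet above
        backingMap.computeIfAbsent(key, k -> LinkedHashMultiset.create()).add(source);
    }

    public static void main(String[] args) {
        PerKeySourceCounts counts = new PerKeySourceCounts();
        counts.add("binding-A", "Module1");
        counts.add("binding-A", "Module1");
        counts.add("binding-B", "Module2");
        // e.g. {binding-A=[Module1 x 2], binding-B=[Module2]} (HashMap key order not guaranteed)
        System.out.println(counts.backingMap);
    }
}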
Project: java_in_examples    File: GuavaLinkedHashMultisetTest.java
public static void main(String[] args) {
    // Parse text to separate words
    String INPUT_TEXT = "Hello World! Hello All! Hi World!";
    // Create Multiset
    Multiset<String> multiset = LinkedHashMultiset.create(Arrays.asList(INPUT_TEXT.split(" ")));

    // Print the word counts
    System.out.println(multiset); // prints [Hello x 2, World! x 2, All!, Hi] - in predictable iteration order
    // Print all unique words
    System.out.println(multiset.elementSet());    // prints [Hello, World!, All!, Hi] - in predictable iteration order

    // Print the number of occurrences of individual words
    System.out.println("Hello = " + multiset.count("Hello"));    // prints 2
    System.out.println("World = " + multiset.count("World!"));    // prints 2
    System.out.println("All = " + multiset.count("All!"));    // prints 1
    System.out.println("Hi = " + multiset.count("Hi"));    // prints 1
    System.out.println("Empty = " + multiset.count("Empty"));    // prints 0

    // Print the total number of words
    System.out.println(multiset.size());    // prints 6

    // Print the number of unique words
    System.out.println(multiset.elementSet().size());    // prints 4
}
Project: java_in_examples    File: GuavaLinkedHashMultisetTest.java
public static void main(String[] args) {
    // Split the text into words
    String INPUT_TEXT = "Hello World! Hello All! Hi World!";
    // Create the Multiset
    Multiset<String> multiset = LinkedHashMultiset.create(Arrays.asList(INPUT_TEXT.split(" ")));

    // Print the number of occurrences of each word
    System.out.println(multiset); // prints [Hello x 2, World! x 2, All!, Hi] - in order of first insertion
    // Print all unique words
    System.out.println(multiset.elementSet());    // prints [Hello, World!, All!, Hi] - in order of first insertion

    // Print the count for each word
    System.out.println("Hello = " + multiset.count("Hello"));    // prints 2
    System.out.println("World = " + multiset.count("World!"));    // prints 2
    System.out.println("All = " + multiset.count("All!"));    // prints 1
    System.out.println("Hi = " + multiset.count("Hi"));    // prints 1
    System.out.println("Empty = " + multiset.count("Empty"));    // prints 0

    // Print the total number of words in the text
    System.out.println(multiset.size());    // prints 6

    // Print the total number of unique words
    System.out.println(multiset.elementSet().size());    // prints 4
}
Project: SPLevo    File: CopyContainingFilenameHandler.java
/**
 * Collect the file paths of the variation points of the selected elements.
 *
 * Each variation point is registered only once, even if several variants of the same variation
 * point have been selected.
 *
 * @param selection
 *            The current selection
 * @return The multi-set of variation point file paths.
 */
private LinkedHashMultiset<String> collectFilePaths(IStructuredSelection selection) {
    LinkedHashMultiset<String> filePaths = LinkedHashMultiset.create();

    Set<VariationPoint> vps = Sets.newLinkedHashSet();

    for (Object selectedItem : selection.toList()) {
        if (selectedItem instanceof Variant) {
            vps.add(((Variant) selectedItem).getVariationPoint());
        } else if (selectedItem instanceof VariationPoint) {
            vps.add((VariationPoint) selectedItem);
        }
    }

    for (VariationPoint vp : vps) {
        if (vp != null) {
            filePaths.add(getFile(vp));
        }
    }

    return filePaths;
}
Project: guice    File: WeakKeySet.java
public void add(Key<?> key, State state, Object source) {
  if (backingMap == null) {
    backingMap = Maps.newHashMap();
  }
  // if it's an instanceof Class, it was a JIT binding, which we don't
  // want to retain.
  if (source instanceof Class || source == SourceProvider.UNKNOWN_SOURCE) {
    source = null;
  }
  Multiset<Object> sources = backingMap.get(key);
  if (sources == null) {
    sources = LinkedHashMultiset.create();
    backingMap.put(key, sources);
  }
  Object convertedSource = Errors.convert(source);
  sources.add(convertedSource);

  // Avoid all the extra work if we can.
  if (state.parent() != State.NONE) {
    Set<KeyAndSource> keyAndSources = evictionCache.getIfPresent(state);
    if (keyAndSources == null) {
      evictionCache.put(state, keyAndSources = Sets.newHashSet());
    }
    keyAndSources.add(new KeyAndSource(key, convertedSource));
  }
}
Project: gwt-jackson    File: MultisetGwtTest.java
public void testSerialization() {
    BeanWithMultisetTypes bean = new BeanWithMultisetTypes();

    List<String> list = Arrays.asList( "foo", "abc", null, "abc" );
    List<String> listWithNonNull = Arrays.asList( "foo", "abc", "bar", "abc" );

    bean.multiset = LinkedHashMultiset.create( list );
    bean.hashMultiset = HashMultiset.create( Arrays.asList( "abc", "abc" ) );
    bean.linkedHashMultiset = LinkedHashMultiset.create( list );
    bean.sortedMultiset = TreeMultiset.create( listWithNonNull );
    bean.treeMultiset = TreeMultiset.create( listWithNonNull );
    bean.immutableMultiset = ImmutableMultiset.copyOf( listWithNonNull );
    bean.enumMultiset = EnumMultiset.create( Arrays.asList( AlphaEnum.B, AlphaEnum.A, AlphaEnum.D, AlphaEnum.A ) );

    String expected = "{" +
            "\"multiset\":[\"foo\",\"abc\",\"abc\",null]," +
            "\"hashMultiset\":[\"abc\",\"abc\"]," +
            "\"linkedHashMultiset\":[\"foo\",\"abc\",\"abc\",null]," +
            "\"sortedMultiset\":[\"abc\",\"abc\",\"bar\",\"foo\"]," +
            "\"treeMultiset\":[\"abc\",\"abc\",\"bar\",\"foo\"]," +
            "\"immutableMultiset\":[\"foo\",\"abc\",\"abc\",\"bar\"]," +
            "\"enumMultiset\":[\"A\",\"A\",\"B\",\"D\"]" +
            "}";

    assertEquals( expected, BeanWithMultisetTypesMapper.INSTANCE.write( bean ) );
}
Project: gwt-jackson    File: MultisetGwtTest.java
public void testDeserialization() {
    String input = "{" +
            "\"multiset\":[\"foo\",\"abc\",\"abc\",null]," +
            "\"hashMultiset\":[\"abc\",\"abc\"]," +
            "\"linkedHashMultiset\":[\"foo\",\"abc\",\"abc\",null]," +
            "\"sortedMultiset\":[\"foo\",\"abc\",\"bar\",\"abc\",null]," +
            "\"treeMultiset\":[\"bar\",\"abc\",\"abc\",\"foo\",null]," +
            "\"immutableMultiset\":[\"foo\",\"abc\",\"abc\",\"bar\",null]," +
            "\"enumMultiset\":[\"B\",\"A\",\"A\",\"D\",null]" +
            "}";

    BeanWithMultisetTypes result = BeanWithMultisetTypesMapper.INSTANCE.read( input );
    assertNotNull( result );

    List<String> expectedList = Arrays.asList( "foo", "abc", null, "abc" );
    List<String> expectedListWithNonNull = Arrays.asList( "foo", "abc", "bar", "abc" );

    assertEquals( LinkedHashMultiset.create( expectedList ), result.multiset );
    assertEquals( HashMultiset.create( Arrays.asList( "abc", "abc" ) ), result.hashMultiset );
    assertEquals( LinkedHashMultiset.create( expectedList ), result.linkedHashMultiset );
    assertEquals( TreeMultiset.create( expectedListWithNonNull ), result.sortedMultiset );
    assertEquals( TreeMultiset.create( expectedListWithNonNull ), result.treeMultiset );
    assertEquals( ImmutableMultiset.copyOf( expectedListWithNonNull ), result.immutableMultiset );
    assertEquals( EnumMultiset.create( Arrays.asList( AlphaEnum.B, AlphaEnum.A, AlphaEnum.D, AlphaEnum.A ) ), result.enumMultiset );
}
Project: CSS-Editor-FX    File: CSSContext.java
public CSSContext() {
  selectors = LinkedHashMultiset.create();
  classes = LinkedHashMultiset.create();
  javaClasses = LinkedHashMultiset.create();
  ids = LinkedHashMultiset.create();
  states = LinkedHashMultiset.create();
  entries = LinkedListMultimap.create();
  paints = LinkedListMultimap.create();
}
Project: xtext-extras    File: TypeConformanceComputer.java
/**
 * Keeps the cumulated distance for all the common raw super types of the given references.
 * Interfaces that are more directly implemented will get a lower total count than more general
 * interfaces.
 */
protected void cumulateDistance(final List<LightweightTypeReference> references, Multimap<JvmType, LightweightTypeReference> all,
        Multiset<JvmType> cumulatedDistance) {
    for(LightweightTypeReference other: references) {
        Multiset<JvmType> otherDistance = LinkedHashMultiset.create();
        initializeDistance(other, all, otherDistance);
        cumulatedDistance.retainAll(otherDistance);
        for(Multiset.Entry<JvmType> typeToDistance: otherDistance.entrySet()) {
            if (cumulatedDistance.contains(typeToDistance.getElement()))
                cumulatedDistance.add(typeToDistance.getElement(), typeToDistance.getCount());
        }
    }
}
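cumulateDistance relies on two Multiset behaviours: retainAll() keeps only the elements that also occur in the other collection (occurrence counts are ignored), and add(element, occurrences) raises an element's count by the given amount. A standalone sketch of that accumulate-and-intersect idiom, using String names in place of JvmType purely for illustration:

import com.google.common.collect.LinkedHashMultiset;
import com.google.common.collect.Multiset;

public class CumulateDistanceSketch {
    public static void main(String[] args) {
        // Distances of common raw super types collected so far
        Multiset<String> cumulatedDistance = LinkedHashMultiset.create();
        cumulatedDistance.add("Object", 2);
        cumulatedDistance.add("Comparable", 1);
        cumulatedDistance.add("Serializable", 1);

        // Distances contributed by the next type reference
        Multiset<String> otherDistance = LinkedHashMultiset.create();
        otherDistance.add("Object", 3);
        otherDistance.add("Comparable", 2);

        // Keep only the types common to both, then add the new distances on top.
        cumulatedDistance.retainAll(otherDistance);
        for (Multiset.Entry<String> typeToDistance : otherDistance.entrySet()) {
            if (cumulatedDistance.contains(typeToDistance.getElement())) {
                cumulatedDistance.add(typeToDistance.getElement(), typeToDistance.getCount());
            }
        }

        System.out.println(cumulatedDistance); // [Object x 5, Comparable x 3]
    }
}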
Project: registry    File: Schema.java
private static Multiset<Field> parseArray(List<Object> array) throws ParserException {
    Multiset<Field> members = LinkedHashMultiset.create();
    for(Object member: array) {
        members.add(parseField(null, member));
    }
    return members;
}
Project: welshare    File: TwitterService.java
private void fillStats(List<Status> list, Map<DateMidnight, Integer> map, DateMidnight start, DateMidnight end) {
    LinkedList<Status> linkedList = new LinkedList<Status>(list);
    Iterator<Status> iterator = linkedList.descendingIterator();
    if (!iterator.hasNext()) {
        return;
    }

    Multiset<DateMidnight> data = LinkedHashMultiset.create();

    Status currentStatus = iterator.next();
    DateMidnight current = new DateMidnight(currentStatus.getCreatedAt());
    while (iterator.hasNext() || currentStatus != null) {
        DateMidnight msgTime = new DateMidnight(currentStatus.getCreatedAt());
        if (current.equals(msgTime)) {
            data.add(current);
            if (iterator.hasNext()) {
                currentStatus = iterator.next();
            } else {
                currentStatus = null;
            }
        } else {
            current = current.plusDays(1);
        }
    }

    for (DateMidnight dm = start; !dm.isAfter(end); dm = dm.plusDays(1)) {
        map.put(dm, data.count(dm));
    }
}
Project: SPLevo    File: JaMoPPProgramDependencyVPMAnalyzer.java
/**
 * Identify the dependencies between the variation points based on referring and referred
 * elements.<br>
 * Build and return an edge descriptor for each of those pairs.
 *
 * @param referenceSelector
 *            The selector for the references to consider.
 * @param index
 *            The index containing previously identified element references.
 * @return The list of identified edge descriptors.
 */
private List<VPMEdgeDescriptor> identifyDependencies(ReferenceSelector referenceSelector, VPReferenceIndex index) {
    List<VPMEdgeDescriptor> edges = Lists.newArrayList();
    List<String> edgeRegistry = new ArrayList<String>();
    Multiset<DependencyType> statistics = LinkedHashMultiset.create();

    for (Commentable element : index.referencedElementsIndex.keySet()) {
        List<VPMEdgeDescriptor> vpEdges = identifyRelatedVPsForReferencedElement(edgeRegistry, element,
                referenceSelector, index, statistics);
        edges.addAll(vpEdges);
    }

    printStatistics(statistics);
    return edges;
}
Project: SPLevo    File: CopyContainingFilenameHandler.java
@Override
public Object execute(ExecutionEvent event) throws ExecutionException {

    ISelection curSelection = HandlerUtil.getCurrentSelection(event);
    if (curSelection == null || !(curSelection instanceof IStructuredSelection)) {
        return null;
    }

    IStructuredSelection selection = (IStructuredSelection) curSelection;
    LinkedHashMultiset<String> filePaths = collectFilePaths(selection);
    String clipboardContent = buildClipboardContent(filePaths);
    copyToClipboard(clipboardContent);

    return null;
}
Project: SPLevo    File: CopyContainingFilenameHandler.java
private String buildClipboardContent(LinkedHashMultiset<String> filePaths) {
    StringBuilder copyContent = new StringBuilder();
    for (String file : filePaths.elementSet()) {
        copyContent.append(FilenameUtils.getName(file));
        copyContent.append(",");
        copyContent.append(filePaths.count(file));
        copyContent.append("\r\n");
    }
    return copyContent.toString();
}
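Taken together, collectFilePaths and buildClipboardContent emit one "filename,count" line per distinct file, where the count is the number of selected variation points located in that file. A small sketch of the resulting clipboard text, using invented file paths (the SPLevo types are not needed for the counting part):

import com.google.common.collect.LinkedHashMultiset;
import org.apache.commons.io.FilenameUtils;

public class ClipboardContentSketch {
    public static void main(String[] args) {
        LinkedHashMultiset<String> filePaths = LinkedHashMultiset.create();
        filePaths.add("/project/src/Foo.java");
        filePaths.add("/project/src/Foo.java"); // two variation points located in Foo.java
        filePaths.add("/project/src/Bar.java");

        StringBuilder copyContent = new StringBuilder();
        for (String file : filePaths.elementSet()) {
            copyContent.append(FilenameUtils.getName(file));
            copyContent.append(",");
            copyContent.append(filePaths.count(file));
            copyContent.append("\r\n");
        }
        System.out.print(copyContent); // Foo.java,2 followed by Bar.java,1
    }
}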
Project: guice-old    File: WeakKeySet.java
public void add(Key<?> key, State state, Object source) {
  if (backingSet == null) {
    backingSet = Maps.newHashMap();
  }
  // if it's an instanceof Class, it was a JIT binding, which we don't
  // want to retain.
  if (source instanceof Class || source == SourceProvider.UNKNOWN_SOURCE) {
    source = null;
  }
  Object mapKey = toMapKey(key);
  Multiset<Object> sources = backingSet.get(mapKey);
  if (sources == null) {
    sources = LinkedHashMultiset.create();
    backingSet.put(mapKey, sources);
  }
  Object convertedSource = Errors.convert(source);
  sources.add(convertedSource);

  // Avoid all the extra work if we can.
  if (state.parent() != State.NONE) {
    Set<KeyAndSource> keyAndSources = evictionCache.getIfPresent(state);
    if (keyAndSources == null) {
      evictionCache.put(state, keyAndSources = Sets.newHashSet());
    }
    keyAndSources.add(new KeyAndSource(mapKey, convertedSource));
  }
}
Project: RinLog    File: RandomRoutePlanner.java
/**
 * Creates a random route planner using the specified random seed.
 * @param seed The random seed.
 */
public RandomRoutePlanner(long seed) {
  LOGGER.info("constructor {}", seed);
  assignedParcels = LinkedHashMultiset.create();
  current = Optional.absent();
  rng = new RandomAdaptor(new MersenneTwister(seed));
}
Project: guava-mock    File: FreshValueGenerator.java
@Generates private static <E> LinkedHashMultiset<E> generateLinkedHashMultiset(E freshElement) {
  LinkedHashMultiset<E> multiset = LinkedHashMultiset.create();
  multiset.add(freshElement);
  return multiset;
}
Project: guava-mock    File: FreshValueGeneratorTest.java
public void testLinkedHashMultiset() {
  assertFreshInstance(new TypeToken<LinkedHashMultiset<String>>() {});
}
Project: googles-monorepo-demo    File: FreshValueGenerator.java
@Generates private static <E> LinkedHashMultiset<E> generateLinkedHashMultiset(E freshElement) {
  LinkedHashMultiset<E> multiset = LinkedHashMultiset.create();
  multiset.add(freshElement);
  return multiset;
}
Project: googles-monorepo-demo    File: FreshValueGeneratorTest.java
public void testLinkedHashMultiset() {
  assertFreshInstance(new TypeToken<LinkedHashMultiset<String>>() {});
}
Project: FreeBuilder    File: MultisetProperty.java
@Override
public void addBuilderFieldDeclaration(SourceBuilder code) {
  code.addLine("private final %1$s<%2$s> %3$s = %1$s.create();",
      LinkedHashMultiset.class, elementType, property.getField());
}
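The format string substitutes the LinkedHashMultiset type (%1$s), the element type (%2$s), and the field name (%3$s) into a single field declaration of the generated builder. Roughly, and assuming a hypothetical String element type and a field named properties, the emitted source would contain something like:

import com.google.common.collect.LinkedHashMultiset;

// Approximate shape of the generated builder field; element type and field name are hypothetical.
class GeneratedBuilderSketch {
    private final LinkedHashMultiset<String> properties = LinkedHashMultiset.create();
}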
Project: guava-libraries    File: FreshValueGenerator.java
@Generates private static <E> LinkedHashMultiset<E> generateLinkedHashMultiset(E freshElement) {
  LinkedHashMultiset<E> multiset = LinkedHashMultiset.create();
  multiset.add(freshElement);
  return multiset;
}
Project: guava-libraries    File: FreshValueGeneratorTest.java
public void testLinkedHashMultiset() {
  assertFreshInstance(new TypeToken<LinkedHashMultiset<String>>() {});
}
Project: cleartk    File: SumBasicModel.java
public TermFrequencyMap() {
  this.termFrequencies = LinkedHashMultiset.create();
}
Project: cleartk    File: TfidfExtractor.java
public IDFMap() {
  this.documentFreqMap = LinkedHashMultiset.create();
  this.totalDocumentCount = 0;
}
Project: welshare    File: MessageServiceImpl.java
@Override
@SqlReadonlyTransactional
public WelshareStats getStats(User user) {
    WelshareStats stats = new WelshareStats();
    List<Message> messages = getDao().getUserMessages(user, 200, 0);

    if (messages.isEmpty()) {
        return stats;
    }

    LinkedList<Message> linkedList = new LinkedList<Message>(messages);
    Iterator<Message> iterator = linkedList.descendingIterator();

    Multiset<DateMidnight> messagesData = LinkedHashMultiset.create();
    Multiset<DateMidnight> likesData = LinkedHashMultiset.create();
    Multiset<DateMidnight> repliesData = LinkedHashMultiset.create();

    Message currentMessage = iterator.next();
    DateMidnight current = new DateMidnight(currentMessage.getDateTime());
    DateMidnight start = current;
    while (iterator.hasNext() || currentMessage != null) {
        // skip imported messages
        DateMidnight msgTime = new DateMidnight(currentMessage.getDateTime());
        if (current.equals(msgTime)) {
            if (!currentMessage.isImported()) {
                messagesData.add(current);
                likesData.add(current, currentMessage.getScore());
                repliesData.add(current, currentMessage.getReplies());
            }
            if (iterator.hasNext()) {
                currentMessage = iterator.next();
            } else {
                currentMessage = null;
            }
        } else {
            current = current.plusDays(1);
        }
    }
    DateMidnight end = current;
    if (Days.daysBetween(start, end).getDays() > 30) {
        start = end.minusDays(30);
    }

    for (DateMidnight dm = start; !dm.isAfter(end); dm = dm.plusDays(1)) {
        stats.getMessages().put(dm, messagesData.count(dm));
        stats.getReplies().put(dm, repliesData.count(dm));
        stats.getLikes().put(dm, likesData.count(dm));
    }


    int days = Days.daysBetween(start, end).getDays();
    if (days == 0) {
        return stats; // no further calculation
    }

    int[] messagesMaxAndSum = CollectionUtils.getMaxAndSum(stats.getMessages());
    stats.setMaxMessages(messagesMaxAndSum[0]);
    stats.setAverageMessages(messagesMaxAndSum[1] / days);

    int[] likesMaxAndSum = CollectionUtils.getMaxAndSum(stats.getLikes());
    stats.setMaxLikes(likesMaxAndSum[0]);
    stats.setAverageLikes(likesMaxAndSum[1] / days);

    int[] repliesMaxAndSum = CollectionUtils.getMaxAndSum(stats.getReplies());
    stats.setMaxReplies(repliesMaxAndSum[0]);
    stats.setAverageReplies(repliesMaxAndSum[1] / days);

    stats.setMaxCount(NumberUtils.max(
            stats.getMaxMessages(),
            stats.getMaxReplies(),
            stats.getMaxLikes()));

    return stats;
}
Project: flow    File: RetryingException.java
@Override
public String getMessage() {
    Multiset<String> multiset = LinkedHashMultiset.create();
    exceptions.forEach(e -> multiset.add(e.getMessage() == null ? e.getClass().getName() : e.getMessage()));
    return "Failed after " + exceptions.size() + " tries: " + multiset;
}
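Because equal messages collapse into a single multiset entry with a count, repeated failures with the same message are summarized instead of listed one by one. A minimal sketch of that behaviour with invented exception messages:

import com.google.common.collect.LinkedHashMultiset;
import com.google.common.collect.Multiset;
import java.util.Arrays;
import java.util.List;

public class RetryMessageSketch {
    public static void main(String[] args) {
        List<Exception> exceptions = Arrays.asList(
                new RuntimeException("connection reset"),
                new RuntimeException("connection reset"),
                new IllegalStateException()); // no message, falls back to the class name

        Multiset<String> multiset = LinkedHashMultiset.create();
        exceptions.forEach(e ->
                multiset.add(e.getMessage() == null ? e.getClass().getName() : e.getMessage()));

        System.out.println("Failed after " + exceptions.size() + " tries: " + multiset);
        // Failed after 3 tries: [connection reset x 2, java.lang.IllegalStateException]
    }
}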
Project: guava    File: FreshValueGenerator.java
@Generates
private static <E> LinkedHashMultiset<E> generateLinkedHashMultiset(E freshElement) {
  LinkedHashMultiset<E> multiset = LinkedHashMultiset.create();
  multiset.add(freshElement);
  return multiset;
}
Project: guava    File: FreshValueGeneratorTest.java
public void testLinkedHashMultiset() {
  assertFreshInstance(new TypeToken<LinkedHashMultiset<String>>() {});
}
Project: BfROpenLab    File: PmmUtils.java
public static <T> T getMaxCounted(Collection<T> values) {
    return Collections.max(LinkedHashMultiset.create(values).entrySet(),
            (o1, o2) -> Integer.compare(o1.getCount(), o2.getCount())).getElement();
}
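getMaxCounted builds a LinkedHashMultiset from the values and returns the element with the highest occurrence count; on a tie, the element seen first wins, because the entry set iterates in first-insertion order and Collections.max only replaces its candidate on a strictly greater comparison. A short usage sketch (the method is reproduced verbatim to keep the sketch self-contained):

import com.google.common.collect.LinkedHashMultiset;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;

public class MaxCountedSketch {
    public static <T> T getMaxCounted(Collection<T> values) {
        return Collections.max(LinkedHashMultiset.create(values).entrySet(),
                (o1, o2) -> Integer.compare(o1.getCount(), o2.getCount())).getElement();
    }

    public static void main(String[] args) {
        System.out.println(getMaxCounted(Arrays.asList("a", "b", "a", "c", "b", "a"))); // a
        System.out.println(getMaxCounted(Arrays.asList(1, 2, 2, 1)));                   // 1 (tie, first seen wins)
    }
}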
Project: gwt-jackson    File: LinkedHashMultisetJsonDeserializer.java
@Override
protected LinkedHashMultiset<T> newCollection() {
    return LinkedHashMultiset.create();
}
Project: gwt-jackson    File: MultisetJsonDeserializer.java
@Override
protected Multiset<T> newCollection() {
    return LinkedHashMultiset.create();
}
Project: gwt-jackson    File: GuavaConfiguration.java
@Override
protected void configure() {
    type( Optional.class ).serializer( OptionalJsonSerializer.class ).deserializer( OptionalJsonDeserializer.class );
    type( FluentIterable.class ).serializer( IterableJsonSerializer.class );

    // Immutable Collections
    type( ImmutableCollection.class ).serializer( CollectionJsonSerializer.class )
            .deserializer( ImmutableCollectionJsonDeserializer.class );
    type( ImmutableList.class ).serializer( CollectionJsonSerializer.class ).deserializer( ImmutableListJsonDeserializer.class );
    type( ImmutableSet.class ).serializer( CollectionJsonSerializer.class ).deserializer( ImmutableSetJsonDeserializer.class );
    type( ImmutableSortedSet.class ).serializer( CollectionJsonSerializer.class )
            .deserializer( ImmutableSortedSetJsonDeserializer.class );

    // Immutable Map
    type( ImmutableMap.class ).serializer( MapJsonSerializer.class ).deserializer( ImmutableMapJsonDeserializer.class );
    type( ImmutableSortedMap.class ).serializer( MapJsonSerializer.class ).deserializer( ImmutableSortedMapJsonDeserializer.class );

    // BiMap
    type( BiMap.class ).serializer( MapJsonSerializer.class ).deserializer( BiMapJsonDeserializer.class );
    type( ImmutableBiMap.class ).serializer( MapJsonSerializer.class ).deserializer( ImmutableBiMapJsonDeserializer.class );
    type( HashBiMap.class ).serializer( MapJsonSerializer.class ).deserializer( HashBiMapJsonDeserializer.class );
    type( EnumBiMap.class ).serializer( MapJsonSerializer.class ).deserializer( EnumBiMapJsonDeserializer.class );
    type( EnumHashBiMap.class ).serializer( MapJsonSerializer.class ).deserializer( EnumHashBiMapJsonDeserializer.class );

    // Multiset
    type( Multiset.class ).serializer( CollectionJsonSerializer.class ).deserializer( MultisetJsonDeserializer.class );
    type( HashMultiset.class ).serializer( CollectionJsonSerializer.class ).deserializer( HashMultisetJsonDeserializer.class );
    type( LinkedHashMultiset.class ).serializer( CollectionJsonSerializer.class )
            .deserializer( LinkedHashMultisetJsonDeserializer.class );
    type( SortedMultiset.class ).serializer( CollectionJsonSerializer.class ).deserializer( SortedMultisetJsonDeserializer.class );
    type( TreeMultiset.class ).serializer( CollectionJsonSerializer.class ).deserializer( TreeMultisetJsonDeserializer.class );
    type( ImmutableMultiset.class ).serializer( CollectionJsonSerializer.class )
            .deserializer( ImmutableMultisetJsonDeserializer.class );
    type( EnumMultiset.class ).serializer( CollectionJsonSerializer.class ).deserializer( EnumMultisetJsonDeserializer.class );

    // Multimap
    type( Multimap.class ).serializer( MultimapJsonSerializer.class ).deserializer( MultimapJsonDeserializer.class );

    type( ImmutableMultimap.class ).serializer( MultimapJsonSerializer.class ).deserializer( ImmutableMultimapJsonDeserializer.class );
    type( ImmutableSetMultimap.class ).serializer( MultimapJsonSerializer.class )
            .deserializer( ImmutableSetMultimapJsonDeserializer.class );
    type( ImmutableListMultimap.class ).serializer( MultimapJsonSerializer.class )
            .deserializer( ImmutableListMultimapJsonDeserializer.class );

    type( SetMultimap.class ).serializer( MultimapJsonSerializer.class ).deserializer( SetMultimapJsonDeserializer.class );
    type( HashMultimap.class ).serializer( MultimapJsonSerializer.class ).deserializer( HashMultimapJsonDeserializer.class );
    type( LinkedHashMultimap.class ).serializer( MultimapJsonSerializer.class )
            .deserializer( LinkedHashMultimapJsonDeserializer.class );
    type( SortedSetMultimap.class ).serializer( MultimapJsonSerializer.class ).deserializer( SortedSetMultimapJsonDeserializer.class );
    type( TreeMultimap.class ).serializer( MultimapJsonSerializer.class ).deserializer( TreeMultimapJsonDeserializer.class );

    type( ListMultimap.class ).serializer( MultimapJsonSerializer.class ).deserializer( ListMultimapJsonDeserializer.class );
    type( ArrayListMultimap.class ).serializer( MultimapJsonSerializer.class ).deserializer( ArrayListMultimapJsonDeserializer.class );
    type( LinkedListMultimap.class ).serializer( MultimapJsonSerializer.class )
            .deserializer( LinkedListMultimapJsonDeserializer.class );
}
Project: bacter    File: ACGLikelihood.java
/**
 * Ensure pattern counts are up to date.
 */
private void updatePatterns() {
    List<Region> regionList = acg.getRegions(locus);

    // Remove stale pattern sets
    patterns.keySet().retainAll(regionList);
    patternLogLikelihoods.keySet().retainAll(regionList);
    rootPartials.keySet().retainAll(regionList);
    constantPatterns.keySet().retainAll(regionList);

    for (Region region : regionList) {

        if (patterns.containsKey(region))
            continue;

        // Add new pattern set
        Multiset<int[]> patSet = LinkedHashMultiset.create();
        for (int j=region.leftBoundary; j<region.rightBoundary; j++) {
            int [] pat = alignment.getPattern(alignment.getPatternIndex(j));
            patSet.add(pat);
        }
        patterns.put(region, patSet);

        // Allocate memory for corresponding log likelihoods and root partials
        patternLogLikelihoods.put(region, new double[patSet.elementSet().size()]);
        rootPartials.put(region, new double[patSet.elementSet().size()*nStates]);

        // Compute corresponding constant pattern list
        List<Integer> constantPatternList = new ArrayList<>();

        int patternIdx = 0;
        for (int[] pattern : patSet.elementSet()) {
            boolean isConstant = true;
            for (int i=1; i<pattern.length; i++)
                if (pattern[i] != pattern[0]) {
                    isConstant = false;
                    break;
                }

            if (isConstant) {
                if (alignment.getDataType().isAmbiguousState(pattern[0])) {
                    if (useAmbiguitiesInput.get()) {
                        for (int state : alignment.getDataType().getStatesForCode(pattern[0]))
                            constantPatternList.add(patternIdx * nStates + state);
                    }
                } else {
                    constantPatternList.add(patternIdx * nStates + pattern[0]);
                }
            }

            patternIdx += 1;
        }

        constantPatterns.put(region, constantPatternList);
    }
}
Project: bacter    File: ACGLikelihoodBeagle.java
/**
 * Ensure pattern counts are up to date.
 */
private void updatePatterns() {
    List<Region> regionList = acg.getRegions(locus);

    // Remove stale pattern sets
    patterns.keySet().retainAll(regionList);
    constantPatterns.keySet().retainAll(regionList);

    for (Region region : regionList) {

        if (patterns.containsKey(region))
            continue;

        // Add new pattern set
        Multiset<int[]> patSet = LinkedHashMultiset.create();
        for (int j=region.leftBoundary; j<region.rightBoundary; j++) {
            int [] pat = alignment.getPattern(alignment.getPatternIndex(j));
            patSet.add(pat);
        }
        patterns.put(region, patSet);

        // Compute corresponding constant pattern list
        List<Integer> constantPatternList = new ArrayList<>();

        int patternIdx = 0;
        for (int[] pattern : patSet.elementSet()) {
            boolean isConstant = true;
            for (int i=1; i<pattern.length; i++)
                if (pattern[i] != pattern[0]) {
                    isConstant = false;
                    break;
                }

            if (isConstant) {
                if (alignment.getDataType().isAmbiguousState(pattern[0])) {
                    if (useAmbiguitiesInput.get()) {
                        for (int state : alignment.getDataType().getStatesForCode(pattern[0]))
                            constantPatternList.add(patternIdx * nStates + state);
                    }
                } else {
                    constantPatternList.add(patternIdx * nStates + pattern[0]);
                }
            }

            patternIdx += 1;
        }

        constantPatterns.put(region, constantPatternList);
    }
}