Java class org.apache.commons.collections.set.ListOrderedSet example source code

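Before the project snippets, here is a minimal, self-contained sketch of the behaviour they all rely on: ListOrderedSet is a Set decorator from Commons Collections 3.x that rejects duplicates like any Set but iterates in insertion order. The demo class below (ListOrderedSetDemo) is illustrative only and is not taken from any of the listed projects.

    import java.util.ArrayList;
    import java.util.Iterator;
    import java.util.Set;

    import org.apache.commons.collections.set.ListOrderedSet;

    public class ListOrderedSetDemo {

        public static void main(String[] args) {
            // Commons Collections 3.x predates generics, so raw types are used
            // here just as in the project code below.
            Set columns = new ListOrderedSet();
            columns.add("id");
            columns.add("name");
            columns.add("id"); // duplicate: rejected, size stays 2

            // Iteration follows insertion order: "id", then "name"
            for (Iterator it = columns.iterator(); it.hasNext();) {
                System.out.println(it.next());
            }

            // decorate(...) wraps an existing collection with the same
            // insertion-ordered Set behaviour (see BasicSusSelector below).
            Set decorated = ListOrderedSet.decorate(new ArrayList());
            decorated.add("timestamp");
        }
    }
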
Project: Decision    File: SaveToCassandraOperationsServiceTest.java
@Test
public void testAlterTable() throws Exception {
    Exception ex= null;
    try {

        columns.add(new ColumnNameTypeValue("newfield", ColumnType.STRING, "my new value"));
        Set<String> oldColumnNamesnew= new ListOrderedSet();
        oldColumnNamesnew.add("id");
        oldColumnNamesnew.add("name");
        oldColumnNamesnew.add("enabled");
        oldColumnNamesnew.add("timestamp");

        service.alterTable(TABLE, oldColumnNamesnew, columns);

    } catch (Exception e)  {
        ex= e;
    }
    assertEquals("Expected null but exception found", null, ex);

}
Project: Decision    File: StreamToActionBusCallbackTest.java
@Before
public void setUp() throws Exception {
    siddhiManager= new StreamingSiddhiConfiguration().siddhiManager();
    siddhiManager.defineStream(StreamsHelper.STREAM_DEFINITION);
    metadataService= new StreamMetadataService(siddhiManager);
    javaToSiddhiSerializer= new JavaToSiddhiSerializer(metadataService);
    javaToAvroSerializer = new JavaToAvroSerializer(new SpecificDatumReader(InsertMessage.getClassSchema()));

    Set<StreamAction> activeActions= new ListOrderedSet();
    activeActions.add(StreamAction.LISTEN);

    producer= Mockito.mock(Producer.class);
    avroProducer= Mockito.mock(Producer.class);
    //List<KeyedMessage<String, String>> km= any();
    //doNothing().when(producer).send(km);
    doNothing().when(producer).send(Matchers.<List<KeyedMessage<String, String>>>any());

    cbk= new StreamToActionBusCallback(activeActions, streamName, avroProducer,
            javaToSiddhiSerializer, javaToAvroSerializer);
}
Project: apache-ddlutils    File: ForeignKey.java
/**
 * {@inheritDoc}
 */
public Object clone() throws CloneNotSupportedException
{
    ForeignKey result = (ForeignKey)super.clone();

    result._name             = _name;
    result._foreignTableName = _foreignTableName;
    result._references       = new ListOrderedSet();

    for (Iterator it = _references.iterator(); it.hasNext();)
    {
        result._references.add(((Reference)it.next()).clone());
    }

    return result;
}
Project: gemfirexd-oss    File: DumpMetadataTask.java
/**
 * Determines the columns that are present in the given result set.
 * 
 * @param resultSet The result set
 * @return The columns
 */
private Set getColumnsInResultSet(ResultSet resultSet) throws SQLException
{
    ListOrderedSet    result   = new ListOrderedSet();
    ResultSetMetaData metaData = resultSet.getMetaData();

    for (int idx = 1; idx <= metaData.getColumnCount(); idx++)
    {
        result.add(metaData.getColumnName(idx).toUpperCase());
    }

    return result;
}
Project: ga    File: BasicSusSelector.java
/**
 * Sorts the population and marks the top ELITE_PROPORTION individuals as elite.
 * 
 * Note that the population remains sorted (in reversed natural order) on return.
 * 
 * @param population the population to sort and mark
 * @return the aptitude of the first non-elite individual
 */
private V markElite(Population<I, A, C, V> population) {
    Collections.sort(population);
    Collections.reverse(population);
    Set<I> uniq = ListOrderedSet.decorate(population);
    int elite = (int) (population.size() * ELITE_PROPORTION);
    Iterator<I> eliteIt = uniq.iterator();
    for (int i = 0; i < elite; i++) {
        eliteIt.next().setElite(true);
    }
    return (V) eliteIt.next().getAptitude();
}
Project: ojb    File: SqlSelectStatement.java
/**
 * Return the Fields to be selected.
 *
 * @param cld the ClassDescriptor
 * @return the Fields to be selected
 */
protected FieldDescriptor[] buildFieldsForSelect(ClassDescriptor cld)
{
    DescriptorRepository repository = cld.getRepository();
    Set fields = new ListOrderedSet();   // keep the order of the fields

    // add Standard Fields
    // MBAIRD: if the queried object has multiple classes mapped to the same table,
    // collect the unique set of fields across all of those classes, so that an
    // extent can be materialized if needed
    FieldDescriptor fds[] = repository.getFieldDescriptorsForMultiMappedTable(cld);
    for (int i = 0; i < fds.length; i++)
    {
        fields.add(fds[i]);
    }

    // add inherited Fields. This is important when querying for a class having a super-reference
    fds = cld.getFieldDescriptor(true);
    for (int i = 0; i < fds.length; i++)
    {
        fields.add(fds[i]);
    }

    // add Fields of joined subclasses
    Class[] multiJoinedClasses = repository.getSubClassesMultipleJoinedTables(cld, true);
    for (int c = 0; c < multiJoinedClasses.length; c++)
    {
        ClassDescriptor subCld = repository.getDescriptorFor(multiJoinedClasses[c]);
        fds = subCld.getFieldDescriptions();
        for (int i = 0; i < fds.length; i++)
        {
            fields.add(fds[i]);
        }
    }

    FieldDescriptor[] result = new FieldDescriptor[fields.size()];
    fields.toArray(result);
    return result;
}
Project: Decision    File: StreamsHelper.java
public static StratioStreamingMessage getSampleMessage()    {
    Set<StreamAction> actions= new ListOrderedSet();
    actions.add(StreamAction.LISTEN);
    StratioStreamingMessage message= new StratioStreamingMessage(STREAM_NAME, Long.parseLong("1234567890"), COLUMNS);
    message.setActiveActions(actions);
    return message;
}
Project: Decision    File: ActionBaseFunctionHelper.java
protected void initialize()  {
    conf= ConfigFactory.load();

    ZOO_HOST= getHostsStringFromList(conf.getStringList("zookeeper.hosts"));
    MONGO_HOST= getHostsStringFromList(conf.getStringList("mongo.hosts"));

    siddhiManager= new StreamingSiddhiConfiguration().siddhiManager();
    streamStatusDao= new StreamStatusDao();
    ServiceConfiguration serviceConfiguration= new ServiceConfiguration();
    callbackService= serviceConfiguration.callbackService();

    streamOperationsService= new StreamOperationService(siddhiManager, streamStatusDao, callbackService);

    streamOperationsService.createStream(StreamsHelper.STREAM_NAME, StreamsHelper.COLUMNS);
    String queryId= streamOperationsService.addQuery(StreamsHelper.STREAM_NAME, StreamsHelper.QUERY);
    message= StreamsHelper.getSampleMessage();
    message.setRequest(StreamsHelper.QUERY);

    validators= new ListOrderedSet();
    StreamNameNotEmptyValidation validation= new StreamNameNotEmptyValidation();
    validators.add(validation);

    Properties properties = new Properties();
    properties.put("serializer.class", "kafka.serializer.StringEncoder");
    properties.put("metadata.broker.list",  conf.getStringList("kafka.hosts").get(0));
    properties.put("producer.type", "async");

    producer = new kafka.javaapi.producer.Producer<String, String>(new ProducerConfig(properties));

    ConfigurationContext configurationContext = new ConfigurationContext();
    try {
        ClusterSyncManager.getClusterSyncManager(configurationContext, null);
    } catch (Exception e) {
        e.printStackTrace();
    }
}