Example source code for the Java class org.apache.commons.collections.iterators.SingletonIterator
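The snippets below show how different projects use SingletonIterator to expose a single value through the java.util.Iterator interface, typically returning either a one-element iterator or an empty iterator. As a reference point, here is a minimal sketch of the class's behaviour (assuming commons-collections 3.x on the classpath; the class name SingletonIteratorDemo and the values are illustrative only):

import java.util.Iterator;
import org.apache.commons.collections.iterators.SingletonIterator;

public class SingletonIteratorDemo {
    public static void main(String[] args) {
        // Wrap a single object as an Iterator.
        Iterator it = new SingletonIterator("only-element");
        while (it.hasNext()) {
            System.out.println(it.next()); // prints "only-element" exactly once
        }
        System.out.println(it.hasNext()); // false - the iterator is exhausted
    }
}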

Project: feathers-sdk    File: ValueInitializer.java
/**
 * Return an iterator over our definition, if we have one, followed by all the
 * definitions of our children.
 */
public Iterator getDefinitionsIterator()
{
    IteratorList iterList = null;

    if (hasDefinition())
    {
        //  Note: isDescribed() guard omits our own definition if we're in a descriptor tree
        //  TODO remove this once DI is done directly
        if (!(value instanceof Model) || !((Model)value).isDescribed())
        {
            (iterList = new IteratorList()).add(new SingletonIterator(getDefinitionBody()));
        }
    }

    if (value instanceof Model)
    {
        (iterList != null ? iterList : (iterList = new IteratorList())).add(((Model)value).getSubDefinitionsIterator());
    }

    return iterList != null ? iterList.toIterator() : Collections.EMPTY_LIST.iterator();
}
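IteratorList is a feathers-sdk helper rather than part of commons-collections. The same pattern, the node's own definition followed by its children's definitions, could be sketched with the stock IteratorChain; this is an illustrative assumption, not the SDK's actual implementation:

import java.util.Arrays;
import org.apache.commons.collections.iterators.IteratorChain;
import org.apache.commons.collections.iterators.SingletonIterator;

public class DefinitionChainSketch {
    public static void main(String[] args) {
        // One iterator for our own definition, then one over the children's definitions.
        IteratorChain chain = new IteratorChain();
        chain.addIterator(new SingletonIterator("own-definition"));
        chain.addIterator(Arrays.asList("child-1", "child-2").iterator());
        while (chain.hasNext()) {
            System.out.println(chain.next()); // own-definition, child-1, child-2
        }
    }
}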
Project: spliceengine    File: FileFunction.java
/**
 * Call method that parses the string into either a singleton list containing an
 * ExecRow or an empty list.
 *
 * @param s the line to parse
 * @return an iterator over the parsed ExecRow, or an empty iterator
 * @throws Exception if parsing fails and the operation is not permissive
 */
@Override
public Iterator<ExecRow> call(final String s) throws Exception {
    if (operationContext.isFailed())
        return Collections.<ExecRow>emptyList().iterator();
    if (!initialized) {
        Reader reader = new StringReader(s);
        checkPreference();
        tokenizer= new MutableCSVTokenizer(reader,preference);
        initialized = true;
    }
    try {
        tokenizer.setLine(s);
        List<String> read=tokenizer.read();
        BooleanList quotedColumns=tokenizer.getQuotedColumns();
        ExecRow lr =  call(read,quotedColumns);
        return lr==null?Collections.<ExecRow>emptyList().iterator():new SingletonIterator(lr);
    } catch (Exception e) {
        if (operationContext.isPermissive()) {
            operationContext.recordBadRecord(e.getLocalizedMessage(), e);
            return Collections.<ExecRow>emptyList().iterator();
        }
        throw StandardException.plainWrapException(e);
    }
}
Project: spliceengine    File: NormalizeFunction.java
@Override
public Iterator<ExecRow> call(ExecRow sourceRow) throws Exception {

    NormalizeOperation normalize = operationContext.getOperation();
    normalize.source.setCurrentRow(sourceRow);
    if (sourceRow != null) {
        ExecRow normalized = null;
        try {
            normalized = normalize.normalizeRow(sourceRow, requireNotNull);
        } catch (StandardException e) {
            if (operationContext!=null && operationContext.isPermissive()) {
                operationContext.recordBadRecord(e.getLocalizedMessage() + sourceRow.toString(), e);
                return Collections.<ExecRow>emptyList().iterator();
            }
            throw e;
        }
        getActivation().setCurrentRow(normalized, normalize.getResultSetNumber());
        return new SingletonIterator(normalized.getClone());
    }else return Collections.<ExecRow>emptyList().iterator();
}
Project: spliceengine    File: InnerJoinRestrictionFlatMapFunction.java
@Override
public Iterator<ExecRow> call(Tuple2<ExecRow, Iterable<ExecRow>> tuple) throws Exception {
    checkInit();
    leftRow = tuple._1();
    Iterator<ExecRow> it = tuple._2.iterator();
    while (it.hasNext()) {
        rightRow = it.next();
        mergedRow = JoinUtils.getMergedRow(leftRow,
                rightRow, op.wasRightOuterJoin,
                executionFactory.getValueRow(numberOfColumns));
        op.setCurrentRow(mergedRow);
        if (op.getRestriction().apply(mergedRow)) { // restriction satisfied: return the merged row
            op.setCurrentRow(mergedRow);
            return new SingletonIterator(mergedRow);
        }
        operationContext.recordFilter();
    }
    return Collections.EMPTY_LIST.iterator();
}
Project: spliceengine    File: AntiJoinRestrictionFlatMapFunction.java
@Override
public Iterator<ExecRow> call(Tuple2<ExecRow, Iterable<ExecRow>> tuple) throws Exception {
    checkInit();
    leftRow = tuple._1();
    Iterator<ExecRow> it = tuple._2.iterator();
    while (it.hasNext()) {
        rightRow = it.next();
        mergedRow = JoinUtils.getMergedRow(leftRow,
                rightRow, op.wasRightOuterJoin,
                executionFactory.getValueRow(numberOfColumns));
        op.setCurrentRow(mergedRow);
        if (op.getRestriction().apply(mergedRow)) { // Has Row, abandon
            operationContext.recordFilter();
            return Collections.<ExecRow>emptyList().iterator();
        }
    }
    // No Rows Matched...
    ExecRow returnRow = JoinUtils.getMergedRow(leftRow,
            op.getEmptyRow(), op.wasRightOuterJoin,
            executionFactory.getValueRow(numberOfColumns));
    op.setCurrentRow(returnRow);
    return new SingletonIterator(returnRow);
}
Project: param-sweeper    File: SingleValueSweep.java
@SuppressWarnings("unchecked")
@Override
public Iterator<Configuration> iterator() {
    SingletonIterator it = new SingletonIterator(value, false);
    TransformIterator itConfig = new TransformIterator(it,
            new ConfigurationTranformer(param));

    return itConfig;
}
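SingleValueSweep uses the two-argument constructor, whose second argument controls whether remove() is supported. A minimal sketch of that behaviour (assuming commons-collections 3.x; the class name RemovableFlagDemo and the values are illustrative):

import java.util.Iterator;
import org.apache.commons.collections.iterators.SingletonIterator;

public class RemovableFlagDemo {
    public static void main(String[] args) {
        // Second argument false: the element cannot be removed through the iterator.
        Iterator it = new SingletonIterator("value", false);
        System.out.println(it.next()); // "value"
        try {
            it.remove();
        } catch (UnsupportedOperationException e) {
            System.out.println("remove() is disabled when removable is false");
        }
    }
}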
Project: spliceengine    File: ControlExportDataSetWriter.java
@Override
public DataSet<ExecRow> write() throws StandardException{
    Integer count;
    String extension = ".csv";
    long start = System.currentTimeMillis();
    SpliceOperation operation=exportFunction.getOperation();
    boolean isCompressed = path.endsWith(".gz");
    if(!isCompressed && operation instanceof ExportOperation){
        ExportOperation op=(ExportOperation)exportFunction.getOperation();
        isCompressed=op.getExportParams().isCompression();
        if(isCompressed){
            extension+=".gz";
        }
    }
    try{
        final DistributedFileSystem dfs=SIDriver.driver().getSIEnvironment().fileSystem(path);
        dfs.createDirectory(path,false);
        // The 'part-r-00000' naming convention is what spark uses so we are consistent on control side
        try(OutputStream fileOut =dfs.newOutputStream(path /*directory*/,"part-r-00000"+extension/*file*/,StandardOpenOption.CREATE)){
            OutputStream toWrite=fileOut;
            if(isCompressed){
                toWrite=new GZIPOutputStream(fileOut);
            }
            count=exportFunction.call(toWrite,dataSet.toLocalIterator());
        }
        dfs.touchFile(path, ExportFile.SUCCESS_FILE);
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
    long end = System.currentTimeMillis();

    ValueRow valueRow = new ValueRow(2);
    valueRow.setColumn(1,new SQLLongint(count));
    valueRow.setColumn(2,new SQLLongint(end-start));
    return new ControlDataSet<>(new SingletonIterator(valueRow));
}
Project: spliceengine    File: ScalarAggregateFlatMapFunction.java
@SuppressWarnings("unchecked")
@Override
public Iterator<ExecRow> call(Iterator<ExecRow> locatedRows) throws Exception {
    if (!initialized) {
        op = getOperation();
        initialized = true;
    }

    if (!locatedRows.hasNext()) {
        if (returnDefault) {
            ExecRow valueRow = getOperation().getSourceExecIndexRow().getClone();
            op.finishAggregation(valueRow);
            return new SingletonIterator(valueRow);
        } else
            return Collections.EMPTY_LIST.iterator();
    }
    ExecRow r1 = locatedRows.next().getClone();
    if (!op.isInitialized(r1)) {
//        if (RDDUtils.LOG.isTraceEnabled()) {
//            RDDUtils.LOG.trace(String.format("Initializing and accumulating %s", r1));
//        }
        op.initializeVectorAggregation(r1);
    }
    while (locatedRows.hasNext()) {
        ExecRow r2 = locatedRows.next();
        if (!op.isInitialized(r2)) {
            accumulate(r2, r1);
        } else {
            merge(r2, r1);
        }
    }
    op.finishAggregation(r1); // calls setCurrentRow
    return new SingletonIterator(r1);
}
Project: spliceengine    File: StitchMixedRowFlatMapFunction.java
@SuppressWarnings("unchecked")
@Override
public Iterator<ExecRow> call(Iterator<ExecRow> rowIterator) throws Exception {
    if (!initialized) {
        op = (GenericAggregateOperation) getOperation();
        aggregates = op.aggregates;
        // build a map between distinct column to aggregator
        distinctAggregateMap = new HashMap<>();
        for (SpliceGenericAggregator aggr:aggregates) {
            if (!aggr.isDistinct())
                continue;
            distinctAggregateMap.put(new Integer(aggr.getInputColumnId()), aggr);
        }
        initialized = true;
    }

    ExecRow valueRow = op.getSourceExecIndexRow().getClone();
    if (!rowIterator.hasNext()) {
        op.finishAggregation(valueRow);
        return new SingletonIterator(valueRow);
    }

    while (rowIterator.hasNext()) {
        ExecRow r2 = rowIterator.next();
        copyRow(valueRow, r2);
    }

    for(SpliceGenericAggregator aggregator:aggregates){
        if (!aggregator.isInitialized(valueRow)) {
            aggregator.initializeAndAccumulateIfNeeded(valueRow, valueRow);
        }
        aggregator.finish(valueRow);
    }
    op.setCurrentRow(valueRow);
    return new SingletonIterator(valueRow);
}
Project: spliceengine    File: DirectDataSetWriter.java
@Override
public DataSet<ExecRow> write() throws StandardException{
    try{
        pipelineWriter.open();
        CountingIterator rows=new CountingIterator(dataSet.values().toLocalIterator());
        pipelineWriter.write(rows);
        pipelineWriter.close(); //make sure everything gets written

        ValueRow valueRow=new ValueRow(1);
        valueRow.setColumn(1,new SQLLongint(rows.count));
        return new ControlDataSet<>(new SingletonIterator(valueRow));
    }catch(Exception e){
        throw Exceptions.parseException(e);
    }
}
Project: feathers-sdk    File: EventInitializer.java
public Iterator getDefinitionsIterator()
{
    return new SingletonIterator(getDefinitionBody());
}
Project: hiqual    File: Option.java
@Override
public Iterator<V> values() {
    return new SingletonIterator(v);
}
Project: spliceengine    File: ControlDataSetWriter.java
@Override
public DataSet<ExecRow> write() throws StandardException{
    SpliceOperation operation=operationContext.getOperation();
    Txn txn = null;
    try{
        TxnView parent = getTxn();
        txn = SIDriver.driver().lifecycleManager().beginChildTransaction(
                parent,
                parent.getIsolationLevel(),
                parent.isAdditive(),
                pipelineWriter.getDestinationTable(),
                true);
        pipelineWriter.setTxn(txn);
        operation.fireBeforeStatementTriggers();
        pipelineWriter.open(operation.getTriggerHandler(),operation);
        pipelineWriter.write(dataSet.toLocalIterator());

        long recordsWritten = operationContext.getRecordsWritten();
        operationContext.getActivation().getLanguageConnectionContext().setRecordsImported(operationContext.getRecordsWritten());
        txn.commit(); // Commit before closing pipeline so triggers see our writes
        if(pipelineWriter!=null)
            pipelineWriter.close();
        long badRecords = 0;
        if(operation instanceof InsertOperation){
            InsertOperation insertOperation = (InsertOperation)operation;
            BadRecordsRecorder brr = operationContext.getBadRecordsRecorder();
            /*
             * In Control-side execution, we have different operation contexts for each operation,
             * and all operations are held in this JVM. This means that parse errors could be present
             * at the context for the lower operation (i.e. in an import), so we need to collect those errors
             * directly.
             */
            List<SpliceOperation> ops =insertOperation.getOperationStack();
            for(SpliceOperation op:ops){
                if(op==null || op==insertOperation || op.getOperationContext()==null) continue;
                if (brr != null) {
                    brr = brr.merge(op.getOperationContext().getBadRecordsRecorder());
                } else {
                    brr = op.getOperationContext().getBadRecordsRecorder();
                }
            }
            badRecords = (brr != null ? brr.getNumberOfBadRecords() : 0);
            operationContext.getActivation().getLanguageConnectionContext().setFailedRecords(badRecords);
            if(badRecords > 0){
                String fileName = operationContext.getStatusDirectory();
                operationContext.getActivation().getLanguageConnectionContext().setBadFile(fileName);
                if (insertOperation.isAboveFailThreshold(badRecords)) {
                    throw ErrorState.LANG_IMPORT_TOO_MANY_BAD_RECORDS.newException(fileName);
                }
            }
        }
        ValueRow valueRow=null;
        if (badRecords > 0) {
            valueRow = new ValueRow(3);
            valueRow.setColumn(2, new SQLLongint(badRecords));
            valueRow.setColumn(3, new SQLVarchar(operationContext.getStatusDirectory()));
        }
        else {
            valueRow = new ValueRow(1);
        }
        valueRow.setColumn(1,new SQLLongint(recordsWritten));
        return new ControlDataSet<>(new SingletonIterator(valueRow));
    }catch(Exception e){
        if(txn!=null){
            try{
                txn.rollback();
            }catch(IOException e1){
                e.addSuppressed(e1);
            }
        }
        throw Exceptions.parseException(e);
    }
    finally {
        operation.fireAfterStatementTriggers();
    }
}
Project: spliceengine    File: ControlDataSetProcessor.java
@Override
public <V> DataSet<V> singleRowDataSet(V value){
    return new ControlDataSet<>(new SingletonIterator(value));
}
Project: spliceengine    File: ControlDataSetProcessor.java
@Override
public <K,V> PairDataSet<K, V> singleRowPairDataSet(K key,V value){
    return new ControlPairDataSet<>(new SingletonIterator(new Tuple2<>(key,value)));
}
Project: spliceengine    File: SMHiveRecordWriter.java
private DataSet<ExecRow> getDataSet(ExecRowWritable value) {
    return new ControlDataSet(new SingletonIterator(value.get()));
}
Project: playweb    File: Iterators.java
/**
 * Returns an iterator containing the single element <code>element</code> of
 * type <code>T</code>.
 *
 * @param <T> the type of the element
 * @param element the single element to iterate over
 * @return an iterator that yields only <code>element</code>
 */
@SuppressWarnings("unchecked")
public static <T> Iterator<T> singleton(T element) {
    return new SingletonIterator(element);
}
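The generic wrapper confines the unchecked cast to the factory method, so callers get a typed Iterator<T>. A hypothetical caller (assuming the playweb Iterators class above is on the classpath) might look like this:

import java.util.Iterator;

public class SingletonHelperDemo {
    public static void main(String[] args) {
        Iterator<String> one = Iterators.singleton("hello");
        System.out.println(one.next());    // prints "hello"
        System.out.println(one.hasNext()); // false - only a single element
    }
}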