Source code examples for the Java class org.apache.hadoop.io.SequenceFile.Reader
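Every example below follows the same basic pattern: open a SequenceFile.Reader, create key and value objects matching the classes the file was written with, loop over next(key, value) until it returns false, and close the reader when done. As a quick orientation, here is a minimal, self-contained sketch of that pattern; it is not taken from any of the listed projects, and the path /tmp/example.seq is a placeholder assumption.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.util.ReflectionUtils;

public class SequenceFileReadSketch {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        // Placeholder path for illustration only; point this at a real SequenceFile.
        Path path = new Path("/tmp/example.seq");
        SequenceFile.Reader reader = null;
        try {
            // Option-based constructor used by the newer examples below;
            // older examples use the deprecated Reader(fs, path, conf) form.
            reader = new SequenceFile.Reader(conf, SequenceFile.Reader.file(path));
            // Instantiate key/value objects of whatever classes the file was written with.
            Writable key = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
            Writable value = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), conf);
            while (reader.next(key, value)) {
                System.out.println(key + "\t" + value);
            }
        } finally {
            if (reader != null) {
                reader.close();
            }
        }
    }
}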

Project: hadoop-oss    File: TestSequenceFileSerialization.java
@Test
public void testJavaSerialization() throws Exception {
  Path file = new Path(System.getProperty("test.build.data",".") +
      "/testseqser.seq");

  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);

  writer.append(1L, "one");
  writer.append(2L, "two");

  writer.close();

  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();

}
Project: hadoop    File: DistCpV1.java
static private void finalize(Configuration conf, JobConf jobconf,
    final Path destPath, String presevedAttributes) throws IOException {
  if (presevedAttributes == null) {
    return;
  }
  EnumSet<FileAttribute> preseved = FileAttribute.parse(presevedAttributes);
  if (!preseved.contains(FileAttribute.USER)
      && !preseved.contains(FileAttribute.GROUP)
      && !preseved.contains(FileAttribute.PERMISSION)) {
    return;
  }

  FileSystem dstfs = destPath.getFileSystem(conf);
  Path dstdirlist = new Path(jobconf.get(DST_DIR_LIST_LABEL));
  try (SequenceFile.Reader in =
      new SequenceFile.Reader(jobconf, Reader.file(dstdirlist))) {
    Text dsttext = new Text();
    FilePair pair = new FilePair(); 
    for(; in.next(dsttext, pair); ) {
      Path absdst = new Path(destPath, pair.output);
      updateDestStatus(pair.input, dstfs.getFileStatus(absdst),
          preseved, dstfs);
    }
  }
}
Project: hadoop    File: TestSequenceFileSerialization.java
public void testJavaSerialization() throws Exception {
  Path file = new Path(System.getProperty("test.build.data",".") +
      "/testseqser.seq");

  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);

  writer.append(1L, "one");
  writer.append(2L, "two");

  writer.close();

  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();

}
Project: aliyun-oss-hadoop-fs    File: TestSequenceFileSerialization.java
@Test
public void testJavaSerialization() throws Exception {
  Path file = new Path(System.getProperty("test.build.data",".") +
      "/testseqser.seq");

  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);

  writer.append(1L, "one");
  writer.append(2L, "two");

  writer.close();

  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();

}
Project: GeoCrawler    File: SequenceReader.java
@Override
public int count(String path) throws FileNotFoundException {
  Path file = new Path(path);
  SequenceFile.Reader reader;
  int i = 0;
  try {
    reader = new SequenceFile.Reader(conf, Reader.file(file));
    Writable key = 
        (Writable)ReflectionUtils.newInstance(reader.getKeyClass(), conf);
    Writable value = 
        (Writable)ReflectionUtils.newInstance(reader.getValueClass(), conf);

    while(reader.next(key, value)) {
      i++;
    }
    reader.close();
  } catch (FileNotFoundException fne) {
    throw fne;
  } catch (IOException e) {
    LOG.error("Error occurred while reading file {}: {}", file,
        StringUtils.stringifyException(e));
    throw new WebApplicationException();
  }
  return i;
}
Project: GeoCrawler    File: LinkReader.java
@Override
public int count(String path) throws FileNotFoundException {
  Path file = new Path(path);
  SequenceFile.Reader reader;
  int i = 0;
  try {
    reader = new SequenceFile.Reader(conf, Reader.file(file));
    Writable key = (Writable)ReflectionUtils.newInstance(reader.getKeyClass(), conf);
    Writable value = (Writable)ReflectionUtils.newInstance(reader.getValueClass(), conf);

    while(reader.next(key, value)) {
      i++;
    }
    reader.close();
  } catch (FileNotFoundException fne) {
    throw fne;
  } catch (IOException e) {
    LOG.error("Error occurred while reading file {}: {}", file, StringUtils.stringifyException(e));
    throw new WebApplicationException();
  }
  return i;
}
Project: big-c    File: DistCpV1.java
static private void finalize(Configuration conf, JobConf jobconf,
    final Path destPath, String presevedAttributes) throws IOException {
  if (presevedAttributes == null) {
    return;
  }
  EnumSet<FileAttribute> preseved = FileAttribute.parse(presevedAttributes);
  if (!preseved.contains(FileAttribute.USER)
      && !preseved.contains(FileAttribute.GROUP)
      && !preseved.contains(FileAttribute.PERMISSION)) {
    return;
  }

  FileSystem dstfs = destPath.getFileSystem(conf);
  Path dstdirlist = new Path(jobconf.get(DST_DIR_LIST_LABEL));
  try (SequenceFile.Reader in =
      new SequenceFile.Reader(jobconf, Reader.file(dstdirlist))) {
    Text dsttext = new Text();
    FilePair pair = new FilePair(); 
    for(; in.next(dsttext, pair); ) {
      Path absdst = new Path(destPath, pair.output);
      updateDestStatus(pair.input, dstfs.getFileStatus(absdst),
          preseved, dstfs);
    }
  }
}
Project: big-c    File: DistCpV1.java
/** Check whether the file list has duplicates. */
static private void checkDuplication(FileSystem fs, Path file, Path sorted,
  Configuration conf) throws IOException {
  SequenceFile.Sorter sorter = new SequenceFile.Sorter(fs,
    new Text.Comparator(), Text.class, Text.class, conf);
  sorter.sort(file, sorted);
  try (SequenceFile.Reader in =
       new SequenceFile.Reader(conf, Reader.file(sorted))) {
    Text prevdst = null, curdst = new Text();
    Text prevsrc = null, cursrc = new Text(); 
    for(; in.next(curdst, cursrc); ) {
      if (prevdst != null && curdst.equals(prevdst)) {
        throw new DuplicationException(
          "Invalid input, there are duplicated files in the sources: "
          + prevsrc + ", " + cursrc);
      }
      prevdst = curdst;
      curdst = new Text();
      prevsrc = cursrc;
      cursrc = new Text();
    }
  }
}
Project: big-c    File: TestSequenceFileSerialization.java
public void testJavaSerialization() throws Exception {
  Path file = new Path(System.getProperty("test.build.data",".") +
      "/testseqser.seq");

  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);

  writer.append(1L, "one");
  writer.append(2L, "two");

  writer.close();

  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();

}
Project: TopPI    File: DistCache.java
private static TIntIntMap readIntIntMap(URI[] files, Configuration conf, String token, int size) throws IOException {
    TIntIntMap map = new TIntIntHashMap(size, Constants.DEFAULT_LOAD_FACTOR, -1, -1);
    for (URI file : files) {
        if (file.getPath().contains(token)) {
            SequenceFile.Reader reader = new SequenceFile.Reader(conf, Reader.file(new Path(file)));
            IntWritable key = new IntWritable();
            IntWritable value = new IntWritable();

            while (reader.next(key, value)) {
                map.put(key.get(), value.get());
            }

            reader.close();
        }
    }

    return map;
}
Project: hadoop-2.6.0-cdh5.4.3    File: TestSequenceFileSerialization.java
public void testJavaSerialization() throws Exception {
  Path file = new Path(System.getProperty("test.build.data",".") +
      "/testseqser.seq");

  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);

  writer.append(1L, "one");
  writer.append(2L, "two");

  writer.close();

  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();

}
Project: hadoop-EAR    File: TestSequenceFileSerialization.java
public void testJavaSerialization() throws Exception {
  Path file = new Path(System.getProperty("test.build.data",".") +
      "/test.seq");

  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);

  writer.append(1L, "one");
  writer.append(2L, "two");

  writer.close();

  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();

}
Project: hadoop-plus    File: TestSequenceFileSerialization.java
public void testJavaSerialization() throws Exception {
  Path file = new Path(System.getProperty("test.build.data",".") +
      "/testseqser.seq");

  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);

  writer.append(1L, "one");
  writer.append(2L, "two");

  writer.close();

  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();

}
Project: big-data-lite    File: SequenceFileRead.java
public static void main(String[] args) throws IOException {
    String uri = args[0];
    String split = args[1];
    Configuration conf = new Configuration();
    Path path = new Path(uri);
    SequenceFile.Reader reader = null;
    try {
        reader = new SequenceFile.Reader(conf, Reader.file(path));
        Text key = new Text();
        OrdImageWritable value = new OrdImageWritable();

        int num = 0;

        while (reader.next(key, value)) {
            System.out.println(key.toString() + " " + value.getByteLength());
            ImageIO.write(value.getImage(), "jpg", new File("image" + split + "_" + num++ + ".jpg"));
        }
    } finally {
        IOUtils.closeStream(reader);
    }
}
Project: Pinot    File: AggregationOutputDumpTool.java
private static void processFile(Path path) throws Exception {
  System.out.println("Processing file:" + path);
  SequenceFile.Reader reader = new SequenceFile.Reader(new Configuration(),
      Reader.file(path));
  System.out.println(reader.getKeyClass());
  System.out.println(reader.getValueClassName());
  WritableComparable<?> key = (WritableComparable<?>) reader.getKeyClass()
      .newInstance();
  Writable val = (Writable) reader.getValueClass().newInstance();

  while (reader.next(key, val)) {
    BytesWritable writable = (BytesWritable) key;
    DimensionKey dimensionKey = DimensionKey.fromBytes(writable.getBytes());
    System.out.println(dimensionKey);
  }
}
Project: hops    File: DistCpV1.java
static private void finalize(Configuration conf, JobConf jobconf,
    final Path destPath, String presevedAttributes) throws IOException {
  if (presevedAttributes == null) {
    return;
  }
  EnumSet<FileAttribute> preseved = FileAttribute.parse(presevedAttributes);
  if (!preseved.contains(FileAttribute.USER)
      && !preseved.contains(FileAttribute.GROUP)
      && !preseved.contains(FileAttribute.PERMISSION)) {
    return;
  }

  FileSystem dstfs = destPath.getFileSystem(conf);
  Path dstdirlist = new Path(jobconf.get(DST_DIR_LIST_LABEL));
  try (SequenceFile.Reader in =
      new SequenceFile.Reader(jobconf, Reader.file(dstdirlist))) {
    Text dsttext = new Text();
    FilePair pair = new FilePair(); 
    for(; in.next(dsttext, pair); ) {
      Path absdst = new Path(destPath, pair.output);
      updateDestStatus(pair.input, dstfs.getFileStatus(absdst),
          preseved, dstfs);
    }
  }
}
Project: hops    File: DistCpV1.java
/** Check whether the file list has duplicates. */
static private void checkDuplication(FileSystem fs, Path file, Path sorted,
  Configuration conf) throws IOException {
  SequenceFile.Sorter sorter = new SequenceFile.Sorter(fs,
    new Text.Comparator(), Text.class, Text.class, conf);
  sorter.sort(file, sorted);
  try (SequenceFile.Reader in =
       new SequenceFile.Reader(conf, Reader.file(sorted))) {
    Text prevdst = null, curdst = new Text();
    Text prevsrc = null, cursrc = new Text(); 
    for(; in.next(curdst, cursrc); ) {
      if (prevdst != null && curdst.equals(prevdst)) {
        throw new DuplicationException(
          "Invalid input, there are duplicated files in the sources: "
          + prevsrc + ", " + cursrc);
      }
      prevdst = curdst;
      curdst = new Text();
      prevsrc = cursrc;
      cursrc = new Text();
    }
  }
}
Project: hops    File: TestSequenceFileSerialization.java
@Test
public void testJavaSerialization() throws Exception {
  Path file = new Path(GenericTestUtils.getTempPath("testseqser.seq"));

  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);

  writer.append(1L, "one");
  writer.append(2L, "two");

  writer.close();

  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();

}
Project: kylin    File: CubeStatsReader.java
public CubeStatsResult(Path path, int precision) throws IOException {
    Configuration hadoopConf = HadoopUtil.getCurrentConfiguration();
    Option seqInput = SequenceFile.Reader.file(path);
    try (Reader reader = new SequenceFile.Reader(hadoopConf, seqInput)) {
        LongWritable key = (LongWritable) ReflectionUtils.newInstance(reader.getKeyClass(), hadoopConf);
        BytesWritable value = (BytesWritable) ReflectionUtils.newInstance(reader.getValueClass(), hadoopConf);
        while (reader.next(key, value)) {
            if (key.get() == 0L) {
                percentage = Bytes.toInt(value.getBytes());
            } else if (key.get() == -1) {
                mapperOverlapRatio = Bytes.toDouble(value.getBytes());
            } else if (key.get() == -2) {
                mapperNumber = Bytes.toInt(value.getBytes());
            } else if (key.get() > 0) {
                HLLCounter hll = new HLLCounter(precision);
                ByteArray byteArray = new ByteArray(value.getBytes());
                hll.readRegisters(byteArray.asBuffer());
                counterMap.put(key.get(), hll);
            }
        }
    }
}
Project: kylin    File: HiveToBaseCuboidMapperPerformanceTest.java
@Ignore("convenient trial tool for dev")
@Test
public void test() throws IOException, InterruptedException {
    Configuration hconf = HadoopUtil.getCurrentConfiguration();
    HiveToBaseCuboidMapper mapper = new HiveToBaseCuboidMapper();
    Context context = MockupMapContext.create(hconf, metadataUrl, cubeName, null);

    mapper.doSetup(context);

    Reader reader = new Reader(hconf, SequenceFile.Reader.file(srcPath));
    Writable key = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), hconf);
    Text value = new Text();

    while (reader.next(key, value)) {
        mapper.map(key, value, context);
    }

    reader.close();
}
Project: openimaj    File: SequenceFileUtility.java
/**
 * Return the metadata map. Read mode only.
 *
 * @return metadata
 */
public Map<Text, Text> getMetadata() {
    if (!isReader) {
        throw new UnsupportedOperationException("Cannot read metadata in write mode");
    }

    Reader reader = null;
    try {
        reader = createReader();
        final Map<Text, Text> metadata = reader.getMetadata().getMetadata();
        return metadata;
    } catch (final Exception e) {
        throw new RuntimeException(e);
    } finally {
        if (reader != null)
            try {
                reader.close();
            } catch (final IOException e1) {
            }
    }
}
Project: openimaj    File: SequenceFileUtility.java
/**
 * @return the compression codec in use for this file.
 */
public Class<? extends CompressionCodec> getCompressionCodecClass() {
    if (!isReader)
        return DefaultCodec.class;

    Reader reader = null;
    try {
        reader = createReader();
        if (reader.getCompressionCodec() == null)
            return null;
        return reader.getCompressionCodec().getClass();
    } catch (final Exception e) {
        throw new RuntimeException(e);
    } finally {
        if (reader != null)
            try {
                reader.close();
            } catch (final IOException e1) {
            }
    }
}
Project: pss    File: LshMapper.java
@Override
public void configure(JobConf job) {
    l = job.getInt(LshPartitionMain.L_PROPERTY, LshPartitionMain.L_VALUE);
    try {
        Path[] localFiles = DistributedCache.getLocalCacheFiles(job);
        // System.out.println("local:" + localFiles[0].getName());
        // FileSystem fs = localFiles[0].getFileSystem(job);
        FileSystem fs = FileSystem.get(job);
        // Reader reader = new SequenceFile.Reader(fs, localFiles[0], job);
        Reader reader = new SequenceFile.Reader(fs, new Path("lshfile"), job);
        reader.next(lsh);
        reader.close();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
Project: pss    File: SeqReader.java
public static int readFile(Boolean printValues, FileSystem fs, Path inputPath,
        Configuration conf) throws IOException, InstantiationException, IllegalAccessException {
    int count = 0;

    Reader reader = new SequenceFile.Reader(fs, inputPath, conf);

    Writable key = (Writable) reader.getKeyClass().newInstance();
    Writable value = (Writable) reader.getValueClass().newInstance();

    System.out.println("key class:" + key.getClass().getName());
    System.out.println("value class:" + value.getClass().getName());

    while (reader.next(key, value)) {
        if (printValues)
            System.out.print("\nkey:" + key.toString() + ", value:" + value.toString());
        count++;
    }
    reader.close();
    System.out.println("\n" + inputPath.getName() + " has " + count + " records");
    return count;
}
Project: pss    File: LshMapper.java
@Override
public void configure(JobConf job) {
    l = job.getInt(MinHashLshDriver.L_PROPERTY, MinHashLshDriver.L_VALUE);
    try {
        Path[] localFiles = DistributedCache.getLocalCacheFiles(job);
        // System.out.println("local:" + localFiles[0].getName());
        // FileSystem fs = localFiles[0].getFileSystem(job);
        FileSystem fs = FileSystem.get(job);
        // Reader reader = new SequenceFile.Reader(fs, localFiles[0], job);
        Reader reader = new SequenceFile.Reader(fs, new Path("lshfile"), job);
        reader.next(lsh);
        reader.close();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
Project: hadoop-TCP    File: TestSequenceFileSerialization.java
public void testJavaSerialization() throws Exception {
  Path file = new Path(System.getProperty("test.build.data",".") +
      "/testseqser.seq");

  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);

  writer.append(1L, "one");
  writer.append(2L, "two");

  writer.close();

  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();

}
Project: Kylin    File: BaseCuboidMapperPerformanceTest.java
@Ignore("convenient trial tool for dev")
@Test
public void test() throws IOException, InterruptedException {
    Configuration hconf = new Configuration();
    BaseCuboidMapper mapper = new BaseCuboidMapper();
    Context context = MockupMapContext.create(hconf, metadataUrl, cubeName, null);

    mapper.setup(context);

    Reader reader = new Reader(hconf, SequenceFile.Reader.file(srcPath));
    Writable key = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), hconf);
    Text value = new Text();

    while (reader.next(key, value)) {
        mapper.map(key, value, context);
    }

    reader.close();
}
Project: hadoop-on-lustre    File: TestSequenceFileSerialization.java
public void testJavaSerialization() throws Exception {
  Path file = new Path(System.getProperty("test.build.data",".") +
      "/test.seq");

  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);

  writer.append(1L, "one");
  writer.append(2L, "two");

  writer.close();

  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();

}
Project: hardfs    File: TestSequenceFileSerialization.java
public void testJavaSerialization() throws Exception {
  Path file = new Path(System.getProperty("test.build.data",".") +
      "/testseqser.seq");

  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);

  writer.append(1L, "one");
  writer.append(2L, "two");

  writer.close();

  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();

}
Project: hadoop-on-lustre2    File: TestSequenceFileSerialization.java
public void testJavaSerialization() throws Exception {
  Path file = new Path(System.getProperty("test.build.data",".") +
      "/testseqser.seq");

  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);

  writer.append(1L, "one");
  writer.append(2L, "two");

  writer.close();

  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();

}
Project: incubator-blur    File: SnapshotIndexDeletionPolicy.java
private void loadGenerations() throws IOException {
  FileSystem fileSystem = _path.getFileSystem(_configuration);
  FileStatus[] listStatus = fileSystem.listStatus(_path);
  SortedSet<FileStatus> existing = new TreeSet<FileStatus>(Arrays.asList(listStatus));
  if (existing.isEmpty()) {
    return;
  }
  FileStatus last = existing.last();
  Reader reader = new SequenceFile.Reader(fileSystem, last.getPath(), _configuration);
  Text key = new Text();
  LongWritable value = new LongWritable();
  while (reader.next(key, value)) {
    String name = key.toString();
    long gen = value.get();
    _namesToGenerations.put(name, gen);
    Set<String> names = _generationsToNames.get(gen);
    if (names == null) {
      names = Collections.newSetFromMap(new ConcurrentHashMap<String, Boolean>());
      _generationsToNames.put(gen, names);
    }
    names.add(name);
  }
  reader.close();
  existing.remove(last);
  cleanupOldFiles(fileSystem, existing);
}
Project: incubator-blur    File: BlurInputFormatTest.java
private void walkOutput(Path output, Configuration conf, ResultReader resultReader) throws IOException {
  FileSystem fileSystem = output.getFileSystem(conf);
  FileStatus fileStatus = fileSystem.getFileStatus(output);
  if (fileStatus.isDir()) {
    FileStatus[] listStatus = fileSystem.listStatus(output, new PathFilter() {
      @Override
      public boolean accept(Path path) {
        return !path.getName().startsWith("_");
      }
    });
    for (FileStatus fs : listStatus) {
      walkOutput(fs.getPath(), conf, resultReader);
    }
  } else {
    Reader reader = new SequenceFile.Reader(fileSystem, output, conf);
    Text rowId = new Text();
    TableBlurRecord tableBlurRecord = new TableBlurRecord();
    while (reader.next(rowId, tableBlurRecord)) {
      resultReader.read(rowId, tableBlurRecord);
    }
    reader.close();
  }
}
Project: mrgeo    File: SequenceFileReaderBuilder.java
public Reader build() throws IOException
{
  when(sequenceFileReader.getKeyClass()).thenReturn(keyValueHelper.getKeyClass());
  when(sequenceFileReader.getValueClass()).thenReturn(keyValueHelper.getValueClass());

  when(sequenceFileReader.next(any(Writable.class), any(Writable.class))).thenAnswer(new Answer<Boolean>()
  {
    @Override
    public Boolean answer(InvocationOnMock invocationOnMock) throws Throwable
    {
      // Get the key and value
      Object[] args = invocationOnMock.getArguments();
      Writable key = (Writable) args[0];
      Writable value = (Writable) args[1];
      return keyValueHelper.next(key, value);
    }
  });

  return sequenceFileReader;
}
Project: RDFS    File: TestSequenceFileSerialization.java
public void testJavaSerialization() throws Exception {
  Path file = new Path(System.getProperty("test.build.data",".") +
      "/test.seq");

  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);

  writer.append(1L, "one");
  writer.append(2L, "two");

  writer.close();

  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();

}
Project: hadoop-0.20    File: TestSequenceFileSerialization.java
public void testJavaSerialization() throws Exception {
  Path file = new Path(System.getProperty("test.build.data",".") +
      "/test.seq");

  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);

  writer.append(1L, "one");
  writer.append(2L, "two");

  writer.close();

  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();

}
Project: hortonworks-extension    File: TestSequenceFileSerialization.java
public void testJavaSerialization() throws Exception {
  Path file = new Path(System.getProperty("test.build.data",".") +
      "/test.seq");

  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);

  writer.append(1L, "one");
  writer.append(2L, "two");

  writer.close();

  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();

}
Project: hortonworks-extension    File: TestSequenceFileSerialization.java
public void testJavaSerialization() throws Exception {
  Path file = new Path(System.getProperty("test.build.data",".") +
      "/test.seq");

  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);

  writer.append(1L, "one");
  writer.append(2L, "two");

  writer.close();

  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();

}
Project: hadoop-gpu    File: TestSequenceFileSerialization.java
public void testJavaSerialization() throws Exception {
  Path file = new Path(System.getProperty("test.build.data",".") +
      "/test.seq");

  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);

  writer.append(1L, "one");
  writer.append(2L, "two");

  writer.close();

  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();

}
Project: hadoop-oss    File: TestSequenceFileAppend.java
private void verify2Values(Path file) throws IOException {
  Reader reader = new Reader(conf, Reader.file(file));
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();
}
Project: hadoop-oss    File: TestSequenceFileAppend.java
private void verifyAll4Values(Path file) throws IOException {
  Reader reader = new Reader(conf, Reader.file(file));
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertEquals(3L, reader.next((Object) null));
  assertEquals("three", reader.getCurrentValue((Object) null));
  assertEquals(4L, reader.next((Object) null));
  assertEquals("four", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();
}