@Override
public void readFully(TreeMap<Slice, Slice> data) throws IOException
{
  scanner.rewind();
  for (; !scanner.atEnd(); scanner.advance()) {
    Entry en = scanner.entry();
    // Copy the current key/value into fresh byte arrays and add them to the map.
    int klen = en.getKeyLength();
    int vlen = en.getValueLength();
    byte[] key = new byte[klen];
    byte[] value = new byte[vlen];
    en.getKey(key);
    en.getValue(value);
    data.put(new Slice(key, 0, key.length), new Slice(value, 0, value.length));
  }
}
@Override
public boolean peek(Slice key, Slice value) throws IOException
{
  if (scanner.atEnd()) {
    return false;
  }
  Entry en = scanner.entry();
  byte[] rkey = new byte[en.getKeyLength()];
  byte[] rval = new byte[en.getValueLength()];
  en.getKey(rkey);
  en.getValue(rval);
  key.buffer = rkey;
  key.offset = 0;
  key.length = en.getKeyLength();
  value.buffer = rval;
  value.offset = 0;
  value.length = en.getValueLength();
  return true;
}
@AfterClass
public static void summary() throws Exception
{
  long heapMax = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage().getMax();
  long nonHeapMax = ManagementFactory.getMemoryMXBean().getNonHeapMemoryUsage().getMax();
  logger.info("==============================================================================");
  logger.info("Test Size: " + String.format("%,d", testSize) + " pairs ("
      + String.format("%,d", keySizeBytes) + " key bytes / "
      + String.format("%,d", valueSizeBytes) + " value bytes)");
  logger.info("Memory: " + String.format("%,d", heapMax) + " Heap MAX + "
      + String.format("%,d", nonHeapMax) + " Non-Heap MAX = "
      + String.format("%,d", heapMax + nonHeapMax) + " Total MAX");
  logger.info("==============================================================================");
  logger.info("KV PAIRS (" + keySizeBytes + "/" + valueSizeBytes + "), "
      + "TEST ID, ELAPSED TIME (microseconds), FILE SIZE (bytes)");
  Iterator<?> it = testSummary.entrySet().iterator();
  while (it.hasNext()) {
    Map.Entry<?, ?> kv = (Map.Entry<?, ?>)it.next();
    logger.info(kv.getKey() + "," + kv.getValue());
  }
}
private void readTFileSeqId(Path file) throws IOException
{
  FSDataInputStream in = hdfs.open(file);
  long size = hdfs.getContentSummary(file).getLength();
  TFile.Reader reader = new TFile.Reader(in, size, new Configuration());
  Scanner scanner = reader.createScanner();
  scanner.rewind();
  // Seek to each key in ascending id order and materialize the key/value pair.
  for (int i = 0; i < testSize; i++) {
    scanner.seekTo(getKey(i).getBytes());
    Entry en = scanner.entry();
    en.get(new BytesWritable(new byte[en.getKeyLength()]), new BytesWritable(new byte[en.getValueLength()]));
  }
  reader.close();
}
private void readTFileSeq(Path file) throws IOException
{
  FSDataInputStream in = hdfs.open(file);
  long size = hdfs.getContentSummary(file).getLength();
  TFile.Reader reader = new TFile.Reader(in, size, new Configuration());
  Scanner scanner = reader.createScanner();
  scanner.rewind();
  do {
    Entry en = scanner.entry();
    en.get(new BytesWritable(new byte[en.getKeyLength()]), new BytesWritable(new byte[en.getValueLength()]));
  } while (scanner.advance() && !scanner.atEnd());
  reader.close();
}
private void readDTFileSeq(Path file) throws IOException
{
  FSDataInputStream in = hdfs.open(file);
  long size = hdfs.getContentSummary(file).getLength();
  org.apache.hadoop.io.file.tfile.DTFile.Reader reader =
      new org.apache.hadoop.io.file.tfile.DTFile.Reader(in, size, new Configuration());
  org.apache.hadoop.io.file.tfile.DTFile.Reader.Scanner scanner = reader.createScanner();
  scanner.rewind();
  do {
    org.apache.hadoop.io.file.tfile.DTFile.Reader.Scanner.Entry en = scanner.entry();
    // Access the entry through its block buffer and offsets instead of copying
    // the key/value into new byte arrays.
    en.getBlockBuffer();
    en.getKeyOffset();
    en.getKeyLength();
    en.getValueLength();
    en.getValueOffset();
  } while (scanner.advance() && !scanner.atEnd());
  reader.close();
}
private void readDTFileRandom(Path file) throws IOException
{
  Random random = new Random();
  FSDataInputStream in = hdfs.open(file);
  long size = hdfs.getContentSummary(file).getLength();
  org.apache.hadoop.io.file.tfile.DTFile.Reader reader =
      new org.apache.hadoop.io.file.tfile.DTFile.Reader(in, size, new Configuration());
  org.apache.hadoop.io.file.tfile.DTFile.Reader.Scanner scanner = reader.createScanner();
  scanner.rewind();
  // Seek to a randomly chosen key on each iteration, then access the entry
  // through its block buffer and offsets instead of copying it out.
  for (int i = 0; i < testSize; i++) {
    scanner.seekTo(getKey(random.nextInt(testSize)).getBytes());
    org.apache.hadoop.io.file.tfile.DTFile.Reader.Scanner.Entry en = scanner.entry();
    en.getBlockBuffer();
    en.getKeyOffset();
    en.getKeyLength();
    en.getValueLength();
    en.getValueOffset();
  }
  reader.close();
}
private void readDTFileSeqId(Path file) throws IOException
{
  FSDataInputStream in = hdfs.open(file);
  long size = hdfs.getContentSummary(file).getLength();
  org.apache.hadoop.io.file.tfile.DTFile.Reader reader =
      new org.apache.hadoop.io.file.tfile.DTFile.Reader(in, size, new Configuration());
  org.apache.hadoop.io.file.tfile.DTFile.Reader.Scanner scanner = reader.createScanner();
  scanner.rewind();
  for (int i = 0; i < testSize; i++) {
    scanner.seekTo(getKey(i).getBytes());
    org.apache.hadoop.io.file.tfile.DTFile.Reader.Scanner.Entry en = scanner.entry();
    en.getBlockBuffer();
    en.getKeyOffset();
    en.getKeyLength();
    en.getValueLength();
    en.getValueOffset();
  }
  reader.close();
}
@Override
public boolean next() throws IOException
{
  if (scanner.atEnd()) {
    return false;
  }
  Entry entry = scanner.entry();
  keyLength = entry.getKeyLength();
  checkKeyBuffer(keyLength);
  entry.getKey(keyBuffer);
  valueLength = entry.getValueLength();
  checkValueBuffer(valueLength);
  entry.getValue(valueBuffer);
  scanner.advance();
  return true;
}