Java 类org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoderImpl 实例源码

项目:ditb    文件:TestMajorCompaction.java   
/**
 * Runs a major compaction with PREFIX data block encoding swapped in on every
 * store of the region, then restores each store's original encoder.
 *
 * @param inCacheOnly when true the on-disk encoding is left as NONE (only
 *     cached blocks are encoded); when false PREFIX is used on disk as well
 * @throws Exception if the compaction fails
 */
public void majorCompactionWithDataBlockEncoding(boolean inCacheOnly)
    throws Exception {
  Map<Store, HFileDataBlockEncoder> savedEncoders =
      new HashMap<Store, HFileDataBlockEncoder>();
  // The target encoding is the same for every store.
  final DataBlockEncoding inCache = DataBlockEncoding.PREFIX;
  final DataBlockEncoding onDisk = inCacheOnly ? DataBlockEncoding.NONE : inCache;
  for (Store store : r.getStores()) {
    // Remember the current encoder so it can be restored afterwards.
    savedEncoders.put(store, store.getDataBlockEncoder());
    ((HStore) store).setDataBlockEncoderInTest(new HFileDataBlockEncoderImpl(onDisk));
  }

  majorCompaction();

  // Put the original encoders back in place.
  for (Entry<Store, HFileDataBlockEncoder> saved : savedEncoders.entrySet()) {
    ((HStore) saved.getKey()).setDataBlockEncoderInTest(saved.getValue());
  }
}
项目:LCIndex-HBase-0.94.16    文件:TestCompaction.java   
/**
 * Performs a major compaction with PREFIX data block encoding enabled on every
 * store, restoring each store's original encoder afterwards.
 *
 * @param inCacheOnly if true, blocks are encoded in the cache only (on-disk
 *     encoding stays NONE); otherwise PREFIX is used on disk too
 * @throws Exception if the compaction fails
 */
public void majorCompactionWithDataBlockEncoding(boolean inCacheOnly) throws Exception {
  Map<Store, HFileDataBlockEncoder> savedEncoders =
      new HashMap<Store, HFileDataBlockEncoder>();
  // Encodings to apply are identical for every store, so compute them once.
  final DataBlockEncoding inCache = DataBlockEncoding.PREFIX;
  final DataBlockEncoding onDisk = inCacheOnly ? DataBlockEncoding.NONE : inCache;
  for (Entry<byte[], Store> pair : r.getStores().entrySet()) {
    Store currentStore = pair.getValue();
    // Keep the original encoder around so the test leaves no side effects.
    savedEncoders.put(currentStore, currentStore.getDataBlockEncoder());
    currentStore.setDataBlockEncoderInTest(new HFileDataBlockEncoderImpl(onDisk, inCache));
  }

  majorCompaction();

  // Restore the encoders that were configured before the test ran.
  for (Entry<Store, HFileDataBlockEncoder> saved : savedEncoders.entrySet()) {
    saved.getKey().setDataBlockEncoderInTest(saved.getValue());
  }
}
项目:LCIndex-HBase-0.94.16    文件:EncodedSeekPerformanceTest.java   
/**
 * Command-line entry point.
 *
 * @param args takes one argument — the path of the file to benchmark
 * @throws IOException if there is a bug while reading from disk
 */
public static void main(final String[] args) throws IOException {
  if (args.length < 1) {
    printUsage();
    System.exit(-1);
  }

  Path path = new Path(args[0]);

  // Baseline with no encoding, then one encoder per available encoding
  // (NONE kept on disk, encoding applied in cache).
  // NOTE(review): DataBlockEncoding.values() includes NONE, which may
  // duplicate the baseline entry — confirm against the single-argument
  // constructor's semantics.
  List<HFileDataBlockEncoder> encoders = new ArrayList<HFileDataBlockEncoder>();
  encoders.add(new HFileDataBlockEncoderImpl(DataBlockEncoding.NONE));
  for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
    encoders.add(new HFileDataBlockEncoderImpl(DataBlockEncoding.NONE, encoding));
  }

  new EncodedSeekPerformanceTest().runTests(path, encoders);

  System.exit(0);
}
项目:pbase    文件:TestMajorCompaction.java   
/**
 * Runs a major compaction with PREFIX data block encoding installed on every
 * store, then restores each store's previous encoder.
 *
 * @param inCacheOnly if true only the cache encoding is exercised (on-disk
 *     encoding stays NONE); otherwise PREFIX is applied on disk too
 * @throws Exception if the compaction fails
 */
public void majorCompactionWithDataBlockEncoding(boolean inCacheOnly)
    throws Exception {
  Map<HStore, HFileDataBlockEncoder> savedEncoders =
      new HashMap<HStore, HFileDataBlockEncoder>();
  // Loop-invariant: the same encoding applies to every store.
  final DataBlockEncoding inCache = DataBlockEncoding.PREFIX;
  final DataBlockEncoding onDisk = inCacheOnly ? DataBlockEncoding.NONE : inCache;
  for (Entry<byte[], Store> pair : r.getStores().entrySet()) {
    HStore currentStore = (HStore) pair.getValue();
    // Save the original encoder so the region is left unchanged afterwards.
    savedEncoders.put(currentStore, currentStore.getDataBlockEncoder());
    currentStore.setDataBlockEncoderInTest(new HFileDataBlockEncoderImpl(onDisk));
  }

  majorCompaction();

  // Undo the encoder replacement.
  for (Entry<HStore, HFileDataBlockEncoder> saved : savedEncoders.entrySet()) {
    saved.getKey().setDataBlockEncoderInTest(saved.getValue());
  }
}
项目:HIndex    文件:TestMajorCompaction.java   
/**
 * Runs a major compaction with PREFIX data block encoding swapped in on every
 * store of the region, restoring the original encoders afterwards.
 *
 * @param inCacheOnly if true, the on-disk encoding passed to the encoder is
 *     NONE (only cached blocks would be encoded); otherwise PREFIX is used
 * @throws Exception if the compaction fails
 */
public void majorCompactionWithDataBlockEncoding(boolean inCacheOnly)
    throws Exception {
  Map<HStore, HFileDataBlockEncoder> replaceBlockCache =
      new HashMap<HStore, HFileDataBlockEncoder>();
  for (Entry<byte[], Store> pair : r.getStores().entrySet()) {
    HStore store = (HStore) pair.getValue();
    // Remember the store's current encoder so it can be restored below.
    HFileDataBlockEncoder blockEncoder = store.getDataBlockEncoder();
    replaceBlockCache.put(store, blockEncoder);
    final DataBlockEncoding inCache = DataBlockEncoding.PREFIX;
    final DataBlockEncoding onDisk = inCacheOnly ? DataBlockEncoding.NONE :
        inCache;
    // Only onDisk is passed here — this constructor takes a single encoding.
    store.setDataBlockEncoderInTest(new HFileDataBlockEncoderImpl(onDisk));
  }

  majorCompaction();

  // restore settings: put each store's original encoder back.
  for (Entry<HStore, HFileDataBlockEncoder> entry :
      replaceBlockCache.entrySet()) {
    entry.getKey().setDataBlockEncoderInTest(entry.getValue());
  }
}
项目:IRIndex    文件:TestCompaction.java   
/**
 * Performs a major compaction with PREFIX data block encoding enabled on
 * every store, then restores the encoders that were previously configured.
 *
 * @param inCacheOnly when true, on-disk encoding stays NONE and only the
 *     cache encoding is PREFIX; when false both are PREFIX
 * @throws Exception if the compaction fails
 */
public void majorCompactionWithDataBlockEncoding(boolean inCacheOnly)
    throws Exception {
  Map<Store, HFileDataBlockEncoder> originalEncoders =
      new HashMap<Store, HFileDataBlockEncoder>();
  // Same encodings for all stores; hoisted out of the loop.
  final DataBlockEncoding inCache = DataBlockEncoding.PREFIX;
  final DataBlockEncoding onDisk = inCacheOnly ? DataBlockEncoding.NONE : inCache;
  for (Entry<byte[], Store> pair : r.getStores().entrySet()) {
    Store s = pair.getValue();
    // Stash the encoder in use so the test can clean up after itself.
    originalEncoders.put(s, s.getDataBlockEncoder());
    s.setDataBlockEncoderInTest(new HFileDataBlockEncoderImpl(onDisk, inCache));
  }

  majorCompaction();

  // Re-install the saved encoders.
  for (Entry<Store, HFileDataBlockEncoder> saved : originalEncoders.entrySet()) {
    saved.getKey().setDataBlockEncoderInTest(saved.getValue());
  }
}
项目:IRIndex    文件:EncodedSeekPerformanceTest.java   
/**
 * Command-line entry point: runs the seek-performance tests against the file
 * given as the first argument, once per data block encoding.
 *
 * @param args takes one argument — the path of the file to benchmark
 * @throws IOException if there is a bug while reading from disk
 */
public static void main(final String[] args) throws IOException {
  if (args.length < 1) {
    printUsage();
    System.exit(-1);
  }

  Path path = new Path(args[0]);
  List<HFileDataBlockEncoder> encoders =
      new ArrayList<HFileDataBlockEncoder>();

  // Baseline: no encoding at all.
  encoders.add(new HFileDataBlockEncoderImpl(DataBlockEncoding.NONE));
  // One encoder per available encoding, with NONE kept on disk.
  // NOTE(review): values() includes NONE, which may duplicate the baseline
  // entry above — confirm against the single-arg constructor's semantics.
  for (DataBlockEncoding encodingAlgo : DataBlockEncoding.values()) {
    encoders.add(new HFileDataBlockEncoderImpl(DataBlockEncoding.NONE,
        encodingAlgo));
  }

  EncodedSeekPerformanceTest utility = new EncodedSeekPerformanceTest();
  utility.runTests(path, encoders);

  System.exit(0);
}
项目:hbase    文件:TestMajorCompaction.java   
/**
 * Runs a major compaction with PREFIX data block encoding installed on every
 * store, then restores each store's original encoder.
 *
 * <p>The {@code (HStore)} casts in the original were redundant: both the loop
 * variable and the map key are already declared as {@code HStore}.
 *
 * @param inCacheOnly if true, only cached blocks are encoded (the on-disk
 *     encoding passed to the encoder is NONE); otherwise PREFIX is used
 * @throws Exception if the compaction fails
 */
public void majorCompactionWithDataBlockEncoding(boolean inCacheOnly)
    throws Exception {
  Map<HStore, HFileDataBlockEncoder> replaceBlockCache = new HashMap<>();
  for (HStore store : r.getStores()) {
    // Remember the current encoder so it can be restored after compaction.
    HFileDataBlockEncoder blockEncoder = store.getDataBlockEncoder();
    replaceBlockCache.put(store, blockEncoder);
    final DataBlockEncoding inCache = DataBlockEncoding.PREFIX;
    final DataBlockEncoding onDisk = inCacheOnly ? DataBlockEncoding.NONE :
        inCache;
    store.setDataBlockEncoderInTest(new HFileDataBlockEncoderImpl(onDisk));
  }

  majorCompaction();

  // restore settings
  for (Entry<HStore, HFileDataBlockEncoder> entry : replaceBlockCache.entrySet()) {
    entry.getKey().setDataBlockEncoderInTest(entry.getValue());
  }
}
项目:PyroDB    文件:TestMajorCompaction.java   
/**
 * Runs a major compaction with PREFIX data block encoding enabled on every
 * store of the region, then restores the original encoders.
 *
 * @param inCacheOnly if true, the encoding handed to the encoder is NONE
 *     (cache-only semantics); otherwise PREFIX is used
 * @throws Exception if the compaction fails
 */
public void majorCompactionWithDataBlockEncoding(boolean inCacheOnly)
    throws Exception {
  Map<HStore, HFileDataBlockEncoder> replaceBlockCache =
      new HashMap<HStore, HFileDataBlockEncoder>();
  for (Entry<byte[], Store> pair : r.getStores().entrySet()) {
    HStore store = (HStore) pair.getValue();
    // Save the encoder currently in use for restoration below.
    HFileDataBlockEncoder blockEncoder = store.getDataBlockEncoder();
    replaceBlockCache.put(store, blockEncoder);
    final DataBlockEncoding inCache = DataBlockEncoding.PREFIX;
    final DataBlockEncoding onDisk = inCacheOnly ? DataBlockEncoding.NONE :
        inCache;
    // Note: inCache is computed but only onDisk reaches this single-argument
    // constructor.
    store.setDataBlockEncoderInTest(new HFileDataBlockEncoderImpl(onDisk));
  }

  majorCompaction();

  // restore settings: reinstall each store's saved encoder.
  for (Entry<HStore, HFileDataBlockEncoder> entry :
      replaceBlockCache.entrySet()) {
    entry.getKey().setDataBlockEncoderInTest(entry.getValue());
  }
}
项目:c5    文件:TestCompaction.java   
/**
 * Performs a major compaction while every store temporarily uses PREFIX data
 * block encoding, then restores the original encoders.
 *
 * @param inCacheOnly if true, the on-disk encoding stays NONE; otherwise
 *     PREFIX is used on disk as well
 * @throws Exception if the compaction fails
 */
public void majorCompactionWithDataBlockEncoding(boolean inCacheOnly)
    throws Exception {
  Map<HStore, HFileDataBlockEncoder> previousEncoders =
      new HashMap<HStore, HFileDataBlockEncoder>();
  // The encoding choice does not vary per store.
  final DataBlockEncoding inCache = DataBlockEncoding.PREFIX;
  final DataBlockEncoding onDisk = inCacheOnly ? DataBlockEncoding.NONE : inCache;
  for (Entry<byte[], Store> pair : r.getStores().entrySet()) {
    HStore hstore = (HStore) pair.getValue();
    // Record the encoder being replaced.
    previousEncoders.put(hstore, hstore.getDataBlockEncoder());
    hstore.setDataBlockEncoderInTest(new HFileDataBlockEncoderImpl(onDisk));
  }

  majorCompaction();

  // Restore every store to its pre-test encoder.
  for (Entry<HStore, HFileDataBlockEncoder> saved : previousEncoders.entrySet()) {
    saved.getKey().setDataBlockEncoderInTest(saved.getValue());
  }
}
项目:HBase-FastTableCopy    文件:CopyDataFromSmallToLargeTableMain.java   
/**
 * Initializes the writer-related fields from the destination table's column
 * family settings so store files are written with matching parameters.
 *
 * @param conf configuration used for the filesystem and block cache config
 * @param columnFamily name of the column family being copied into
 * @param largeTD descriptor of the (large) destination table
 * @param outputPath directory the store files will be written under
 * @throws IOException if the filesystem cannot be obtained
 */
private void prepValueForStoreFileWriter(Configuration conf,
    String columnFamily, HTableDescriptor largeTD,
    String outputPath) throws IOException {
  this.columnFamily = columnFamily;
  this.outputPath = outputPath;

  fs = FileSystem.get(conf);
  cacheConf = new CacheConfig(conf);

  // Mirror the destination family's storage settings.
  familyDescriptor = largeTD.getFamily(Bytes.toBytes(columnFamily));
  blocksize = familyDescriptor.getBlocksize();
  compression = familyDescriptor.getCompression();
  bloomFilterType = familyDescriptor.getBloomFilterType();
  dataBlockEncoder = new HFileDataBlockEncoderImpl(
      familyDescriptor.getDataBlockEncodingOnDisk(),
      familyDescriptor.getDataBlockEncoding());
}
项目:HBase-Research    文件:TestCompaction.java   
/**
 * Runs a major compaction with PREFIX data block encoding installed on every
 * store, restoring the original encoders afterwards.
 *
 * @param inCacheOnly if true, on-disk encoding is NONE while the cache
 *     encoding is PREFIX; if false both are PREFIX
 * @throws Exception if the compaction fails
 */
public void majorCompactionWithDataBlockEncoding(boolean inCacheOnly)
    throws Exception {
  Map<Store, HFileDataBlockEncoder> replaceBlockCache =
      new HashMap<Store, HFileDataBlockEncoder>();
  for (Entry<byte[], Store> pair : r.getStores().entrySet()) {
    Store store = pair.getValue();
    // Save the encoder currently configured on the store.
    HFileDataBlockEncoder blockEncoder = store.getDataBlockEncoder();
    replaceBlockCache.put(pair.getValue(), blockEncoder);
    final DataBlockEncoding inCache = DataBlockEncoding.PREFIX;
    final DataBlockEncoding onDisk = inCacheOnly ? DataBlockEncoding.NONE :
        inCache;
    store.setDataBlockEncoderInTest(new HFileDataBlockEncoderImpl(
        onDisk, inCache));
  }

  majorCompaction();

  // restore settings: reinstall each store's original encoder.
  for (Entry<Store, HFileDataBlockEncoder> entry :
      replaceBlockCache.entrySet()) {
    entry.getKey().setDataBlockEncoderInTest(entry.getValue());
  }
}
项目:HBase-Research    文件:EncodedSeekPerformanceTest.java   
/**
 * Command-line entry point.
 *
 * @param args takes one argument — the path of the file to benchmark
 * @throws IOException if there is a bug while reading from disk
 */
public static void main(final String[] args) throws IOException {
  if (args.length < 1) {
    printUsage();
    System.exit(-1);
  }

  // Baseline without encoding, plus one encoder per available encoding with
  // NONE kept on disk.
  List<HFileDataBlockEncoder> encoders = new ArrayList<HFileDataBlockEncoder>();
  encoders.add(new HFileDataBlockEncoderImpl(DataBlockEncoding.NONE));
  for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
    encoders.add(new HFileDataBlockEncoderImpl(DataBlockEncoding.NONE, encoding));
  }

  Path path = new Path(args[0]);
  new EncodedSeekPerformanceTest().runTests(path, encoders);

  System.exit(0);
}
项目:hbase-0.94.8-qod    文件:TestCompaction.java   
/**
 * Runs a major compaction while PREFIX data block encoding is temporarily
 * enabled on every store, then restores the original encoders.
 *
 * @param inCacheOnly if true, on-disk encoding is NONE while the cache
 *     encoding is PREFIX; if false both are PREFIX
 * @throws Exception if the compaction fails
 */
public void majorCompactionWithDataBlockEncoding(boolean inCacheOnly)
    throws Exception {
  Map<Store, HFileDataBlockEncoder> replaceBlockCache =
      new HashMap<Store, HFileDataBlockEncoder>();
  for (Entry<byte[], Store> pair : r.getStores().entrySet()) {
    Store store = pair.getValue();
    // Keep the original encoder so it can be restored below.
    HFileDataBlockEncoder blockEncoder = store.getDataBlockEncoder();
    replaceBlockCache.put(pair.getValue(), blockEncoder);
    final DataBlockEncoding inCache = DataBlockEncoding.PREFIX;
    final DataBlockEncoding onDisk = inCacheOnly ? DataBlockEncoding.NONE :
        inCache;
    store.setDataBlockEncoderInTest(new HFileDataBlockEncoderImpl(
        onDisk, inCache));
  }

  majorCompaction();

  // restore settings: undo the encoder replacement.
  for (Entry<Store, HFileDataBlockEncoder> entry :
      replaceBlockCache.entrySet()) {
    entry.getKey().setDataBlockEncoderInTest(entry.getValue());
  }
}
项目:hbase-0.94.8-qod    文件:EncodedSeekPerformanceTest.java   
/**
 * Command-line entry point.
 *
 * @param args takes one argument — the path of the file to benchmark
 * @throws IOException if there is a bug while reading from disk
 */
public static void main(final String[] args) throws IOException {
  if (args.length < 1) {
    printUsage();
    System.exit(-1);
  }

  Path inputFile = new Path(args[0]);

  List<HFileDataBlockEncoder> encoders = new ArrayList<HFileDataBlockEncoder>();
  // First entry: no encoding at all.
  encoders.add(new HFileDataBlockEncoderImpl(DataBlockEncoding.NONE));
  // Then every available encoding, applied with NONE on disk.
  for (DataBlockEncoding algo : DataBlockEncoding.values()) {
    encoders.add(new HFileDataBlockEncoderImpl(DataBlockEncoding.NONE, algo));
  }

  EncodedSeekPerformanceTest tester = new EncodedSeekPerformanceTest();
  tester.runTests(inputFile, encoders);

  System.exit(0);
}
项目:hbase-0.94.8-qod    文件:TestCompaction.java   
/**
 * Performs a major compaction with PREFIX data block encoding substituted on
 * every store of the region, then restores the saved encoders.
 *
 * @param inCacheOnly when true the on-disk encoding stays NONE and only the
 *     cache encoding is PREFIX; when false both are PREFIX
 * @throws Exception if the compaction fails
 */
public void majorCompactionWithDataBlockEncoding(boolean inCacheOnly)
    throws Exception {
  Map<Store, HFileDataBlockEncoder> encoderBackup =
      new HashMap<Store, HFileDataBlockEncoder>();
  // Identical encoding configuration for all stores.
  final DataBlockEncoding inCache = DataBlockEncoding.PREFIX;
  final DataBlockEncoding onDisk = inCacheOnly ? DataBlockEncoding.NONE : inCache;
  for (Entry<byte[], Store> pair : r.getStores().entrySet()) {
    Store target = pair.getValue();
    // Back up the encoder being replaced.
    encoderBackup.put(target, target.getDataBlockEncoder());
    target.setDataBlockEncoderInTest(new HFileDataBlockEncoderImpl(onDisk, inCache));
  }

  majorCompaction();

  // Reinstall the backed-up encoders.
  for (Entry<Store, HFileDataBlockEncoder> backup : encoderBackup.entrySet()) {
    backup.getKey().setDataBlockEncoderInTest(backup.getValue());
  }
}
项目:hbase-0.94.8-qod    文件:EncodedSeekPerformanceTest.java   
/**
 * Command-line entry point: benchmarks seek performance on the given file,
 * once per data block encoding configuration.
 *
 * @param args takes one argument — the path of the file to benchmark
 * @throws IOException if there is a bug while reading from disk
 */
public static void main(final String[] args) throws IOException {
  if (args.length < 1) {
    printUsage();
    System.exit(-1);
  }

  Path path = new Path(args[0]);
  List<HFileDataBlockEncoder> encoders =
      new ArrayList<HFileDataBlockEncoder>();

  // Baseline: no encoding.
  encoders.add(new HFileDataBlockEncoderImpl(DataBlockEncoding.NONE));
  // One encoder per available encoding, with NONE kept on disk.
  for (DataBlockEncoding encodingAlgo : DataBlockEncoding.values()) {
    encoders.add(new HFileDataBlockEncoderImpl(DataBlockEncoding.NONE,
        encodingAlgo));
  }

  EncodedSeekPerformanceTest utility = new EncodedSeekPerformanceTest();
  utility.runTests(path, encoders);

  System.exit(0);
}
项目:DominoHBase    文件:TestCompaction.java   
/**
 * Runs a major compaction with PREFIX data block encoding installed on every
 * store, restoring the original encoders afterwards.
 *
 * @param inCacheOnly if true, on-disk encoding is NONE while the cache
 *     encoding is PREFIX; if false both are PREFIX
 * @throws Exception if the compaction fails
 */
public void majorCompactionWithDataBlockEncoding(boolean inCacheOnly)
    throws Exception {
  Map<HStore, HFileDataBlockEncoder> replaceBlockCache =
      new HashMap<HStore, HFileDataBlockEncoder>();
  for (Entry<byte[], Store> pair : r.getStores().entrySet()) {
    HStore store = (HStore) pair.getValue();
    // Save the encoder currently configured on the store.
    HFileDataBlockEncoder blockEncoder = store.getDataBlockEncoder();
    replaceBlockCache.put(store, blockEncoder);
    final DataBlockEncoding inCache = DataBlockEncoding.PREFIX;
    final DataBlockEncoding onDisk = inCacheOnly ? DataBlockEncoding.NONE :
        inCache;
    store.setDataBlockEncoderInTest(new HFileDataBlockEncoderImpl(
        onDisk, inCache));
  }

  majorCompaction();

  // restore settings: put the saved encoders back.
  for (Entry<HStore, HFileDataBlockEncoder> entry :
      replaceBlockCache.entrySet()) {
    entry.getKey().setDataBlockEncoderInTest(entry.getValue());
  }
}
项目:DominoHBase    文件:EncodedSeekPerformanceTest.java   
/**
 * Command-line entry point.
 *
 * @param args takes one argument — the path of the file to benchmark
 * @throws IOException if there is a bug while reading from disk
 */
public static void main(final String[] args) throws IOException {
  if (args.length < 1) {
    printUsage();
    System.exit(-1);
  }

  Path path = new Path(args[0]);

  // Build one encoder per available encoding, always with NONE on disk.
  List<HFileDataBlockEncoder> encoders = new ArrayList<HFileDataBlockEncoder>();
  for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
    encoders.add(new HFileDataBlockEncoderImpl(DataBlockEncoding.NONE, encoding));
  }

  new EncodedSeekPerformanceTest().runTests(path, encoders);

  System.exit(0);
}
项目:hindex    文件:TestCompaction.java   
/**
 * Runs a major compaction with PREFIX data block encoding enabled on every
 * store of the region, restoring the original encoders afterwards.
 *
 * @param inCacheOnly if true, on-disk encoding is NONE while the cache
 *     encoding is PREFIX; if false both are PREFIX
 * @throws Exception if the compaction fails
 */
public void majorCompactionWithDataBlockEncoding(boolean inCacheOnly)
    throws Exception {
  Map<Store, HFileDataBlockEncoder> replaceBlockCache =
      new HashMap<Store, HFileDataBlockEncoder>();
  for (Entry<byte[], Store> pair : r.getStores().entrySet()) {
    Store store = pair.getValue();
    // Save the current encoder for restoration after compaction.
    HFileDataBlockEncoder blockEncoder = store.getDataBlockEncoder();
    replaceBlockCache.put(pair.getValue(), blockEncoder);
    final DataBlockEncoding inCache = DataBlockEncoding.PREFIX;
    final DataBlockEncoding onDisk = inCacheOnly ? DataBlockEncoding.NONE :
        inCache;
    store.setDataBlockEncoderInTest(new HFileDataBlockEncoderImpl(
        onDisk, inCache));
  }

  majorCompaction();

  // restore settings: reinstall the original encoders.
  for (Entry<Store, HFileDataBlockEncoder> entry :
      replaceBlockCache.entrySet()) {
    entry.getKey().setDataBlockEncoderInTest(entry.getValue());
  }
}
项目:hindex    文件:EncodedSeekPerformanceTest.java   
/**
 * Command-line entry point: benchmarks seek performance on the given file
 * for each data block encoding configuration.
 *
 * @param args takes one argument — the path of the file to benchmark
 * @throws IOException if there is a bug while reading from disk
 */
public static void main(final String[] args) throws IOException {
  if (args.length < 1) {
    printUsage();
    System.exit(-1);
  }

  Path path = new Path(args[0]);
  List<HFileDataBlockEncoder> encoders =
      new ArrayList<HFileDataBlockEncoder>();

  // Baseline with no encoding.
  encoders.add(new HFileDataBlockEncoderImpl(DataBlockEncoding.NONE));
  // One encoder per available encoding, with NONE kept on disk.
  for (DataBlockEncoding encodingAlgo : DataBlockEncoding.values()) {
    encoders.add(new HFileDataBlockEncoderImpl(DataBlockEncoding.NONE,
        encodingAlgo));
  }

  EncodedSeekPerformanceTest utility = new EncodedSeekPerformanceTest();
  utility.runTests(path, encoders);

  System.exit(0);
}
项目:ditb    文件:TestStoreFile.java   
/**
 * Check if data block encoding information is saved correctly in HFile's
 * file info.
 */
public void testDataBlockEncodingMetaData() throws IOException {
  // Make up a directory hierarchy that has a regiondir ("7e0102") and familyname.
  Path dir = new Path(new Path(this.testDir, "7e0102"), "familyname");
  Path path = new Path(dir, "1234567890");

  // The encoding reaches the writer via the HFileContext below; the
  // HFileDataBlockEncoderImpl local previously built here was never used
  // (dead code) and has been removed.
  DataBlockEncoding dataBlockEncoderAlgo = DataBlockEncoding.FAST_DIFF;
  cacheConf = new CacheConfig(conf);
  HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL)
      .withChecksumType(CKTYPE)
      .withBytesPerCheckSum(CKBYTES)
      .withDataBlockEncoding(dataBlockEncoderAlgo)
      .build();
  // Make a store file and write data to it.
  StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, cacheConf, this.fs)
          .withFilePath(path)
          .withMaxKeyCount(2000)
          .withFileContext(meta)
          .build();
  writer.close();

  StoreFile storeFile = new StoreFile(fs, writer.getPath(), conf,
    cacheConf, BloomType.NONE);
  StoreFile.Reader reader = storeFile.createReader();

  Map<byte[], byte[]> fileInfo = reader.loadFileInfo();
  byte[] value = fileInfo.get(HFileDataBlockEncoder.DATA_BLOCK_ENCODING);
  // NOTE(review): assertEquals on byte[] compares object identity, not
  // contents — consider assertArrayEquals; left unchanged to avoid altering
  // the file's import surface.
  assertEquals(dataBlockEncoderAlgo.getNameInBytes(), value);
}
项目:LCIndex-HBase-0.94.16    文件:TestStoreFile.java   
/**
 * Verifies that the data block encoding chosen for a store file is recorded
 * in the HFile's file-info metadata under DATA_BLOCK_ENCODING.
 */
public void testDataBlockEncodingMetaData() throws IOException {
  // Fake region directory ("7e0102") with a family directory underneath.
  Path dir = new Path(new Path(this.testDir, "7e0102"), "familyname");
  Path path = new Path(dir, "1234567890");

  // Use FAST_DIFF both on disk and in cache.
  DataBlockEncoding dataBlockEncoderAlgo = DataBlockEncoding.FAST_DIFF;
  HFileDataBlockEncoder encoder = new HFileDataBlockEncoderImpl(
      dataBlockEncoderAlgo, dataBlockEncoderAlgo);
  cacheConf = new CacheConfig(conf);

  // Write an (empty) store file with that encoder configured.
  StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, cacheConf, fs,
      HFile.DEFAULT_BLOCKSIZE)
          .withFilePath(path)
          .withDataBlockEncoder(encoder)
          .withMaxKeyCount(2000)
          .withChecksumType(CKTYPE)
          .withBytesPerChecksum(CKBYTES)
          .build();
  writer.close();

  // Re-open the file and read the encoding back out of the file info.
  StoreFile storeFile = new StoreFile(fs, writer.getPath(), conf,
      cacheConf, BloomType.NONE, encoder);
  StoreFile.Reader reader = storeFile.createReader();
  Map<byte[], byte[]> fileInfo = reader.loadFileInfo();
  byte[] value = fileInfo.get(HFileDataBlockEncoder.DATA_BLOCK_ENCODING);

  assertEquals(dataBlockEncoderAlgo.getNameInBytes(), value);
}
项目:pbase    文件:TestStoreFile.java   
/**
 * Check if data block encoding information is saved correctly in HFile's
 * file info.
 */
public void testDataBlockEncodingMetaData() throws IOException {
  // Make up a directory hierarchy that has a regiondir ("7e0102") and familyname.
  Path dir = new Path(new Path(this.testDir, "7e0102"), "familyname");
  Path path = new Path(dir, "1234567890");

  // The encoding is carried by the HFileContext; the HFileDataBlockEncoderImpl
  // local previously constructed here was unused dead code and was removed.
  DataBlockEncoding dataBlockEncoderAlgo = DataBlockEncoding.FAST_DIFF;
  cacheConf = new CacheConfig(conf);
  HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL)
      .withChecksumType(CKTYPE)
      .withBytesPerCheckSum(CKBYTES)
      .withDataBlockEncoding(dataBlockEncoderAlgo)
      .build();
  // Make a store file and write data to it.
  StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, cacheConf, this.fs)
          .withFilePath(path)
          .withMaxKeyCount(2000)
          .withFileContext(meta)
          .build();
  writer.close();

  StoreFile storeFile = new StoreFile(fs, writer.getPath(), conf,
    cacheConf, BloomType.NONE);
  StoreFile.Reader reader = storeFile.createReader();

  Map<byte[], byte[]> fileInfo = reader.loadFileInfo();
  byte[] value = fileInfo.get(HFileDataBlockEncoder.DATA_BLOCK_ENCODING);
  // NOTE(review): assertEquals on byte[] compares identity, not contents —
  // consider assertArrayEquals; verify this assertion actually passes.
  assertEquals(dataBlockEncoderAlgo.getNameInBytes(), value);
}
项目:HIndex    文件:TestStoreFile.java   
/**
 * Check if data block encoding information is saved correctly in HFile's
 * file info.
 */
public void testDataBlockEncodingMetaData() throws IOException {
  // Make up a directory hierarchy that has a regiondir ("7e0102") and familyname.
  Path dir = new Path(new Path(this.testDir, "7e0102"), "familyname");
  Path path = new Path(dir, "1234567890");

  // Encoding is configured through the HFileContext below; the unused
  // HFileDataBlockEncoderImpl local that used to be built here was dead code
  // and has been removed.
  DataBlockEncoding dataBlockEncoderAlgo = DataBlockEncoding.FAST_DIFF;
  cacheConf = new CacheConfig(conf);
  HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL)
      .withChecksumType(CKTYPE)
      .withBytesPerCheckSum(CKBYTES)
      .withDataBlockEncoding(dataBlockEncoderAlgo)
      .build();
  // Make a store file and write data to it.
  StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, cacheConf, this.fs)
          .withFilePath(path)
          .withMaxKeyCount(2000)
          .withFileContext(meta)
          .build();
  writer.close();

  StoreFile storeFile = new StoreFile(fs, writer.getPath(), conf,
    cacheConf, BloomType.NONE);
  StoreFile.Reader reader = storeFile.createReader();

  Map<byte[], byte[]> fileInfo = reader.loadFileInfo();
  byte[] value = fileInfo.get(HFileDataBlockEncoder.DATA_BLOCK_ENCODING);
  // NOTE(review): assertEquals on byte[] compares identity, not contents —
  // consider assertArrayEquals.
  assertEquals(dataBlockEncoderAlgo.getNameInBytes(), value);
}
项目:IRIndex    文件:TestStoreFile.java   
/**
 * Check if data block encoding information is saved correctly in HFile's
 * file info.
 */
public void testDataBlockEncodingMetaData() throws IOException {
  // Make up a directory hierarchy that has a regiondir ("7e0102") and familyname.
  Path dir = new Path(new Path(this.testDir, "7e0102"), "familyname");
  Path path = new Path(dir, "1234567890");

  // FAST_DIFF both on disk and in cache.
  DataBlockEncoding dataBlockEncoderAlgo =
      DataBlockEncoding.FAST_DIFF;
  HFileDataBlockEncoder dataBlockEncoder =
      new HFileDataBlockEncoderImpl(
          dataBlockEncoderAlgo,
          dataBlockEncoderAlgo);
  cacheConf = new CacheConfig(conf);
  // Write an (empty) store file with the encoder configured.
  StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, cacheConf, fs,
      HFile.DEFAULT_BLOCKSIZE)
          .withFilePath(path)
          .withDataBlockEncoder(dataBlockEncoder)
          .withMaxKeyCount(2000)
          .withChecksumType(CKTYPE)
          .withBytesPerChecksum(CKBYTES)
          .build();
  writer.close();

  // Re-open the file and read the encoding back out of the file info.
  StoreFile storeFile = new StoreFile(fs, writer.getPath(), conf,
      cacheConf, BloomType.NONE, dataBlockEncoder);
  StoreFile.Reader reader = storeFile.createReader();

  Map<byte[], byte[]> fileInfo = reader.loadFileInfo();
  byte[] value = fileInfo.get(HFileDataBlockEncoder.DATA_BLOCK_ENCODING);

  // NOTE(review): assertEquals on byte[] compares object identity, not
  // contents — consider assertArrayEquals.
  assertEquals(dataBlockEncoderAlgo.getNameInBytes(), value);
}
项目:hbase    文件:TestHStoreFile.java   
/**
 * Check if data block encoding information is saved correctly in HFile's
 * file info.
 */
@Test
public void testDataBlockEncodingMetaData() throws IOException {
  // Make up a directory hierarchy that has a regiondir ("7e0102") and familyname.
  Path dir = new Path(new Path(testDir, "7e0102"), "familyname");
  Path path = new Path(dir, "1234567890");

  // The encoding travels via the HFileContext; the HFileDataBlockEncoderImpl
  // local previously built here was never used (dead code) and was removed.
  DataBlockEncoding dataBlockEncoderAlgo = DataBlockEncoding.FAST_DIFF;
  cacheConf = new CacheConfig(conf);
  HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL)
      .withChecksumType(CKTYPE)
      .withBytesPerCheckSum(CKBYTES)
      .withDataBlockEncoding(dataBlockEncoderAlgo)
      .build();
  // Make a store file and write data to it.
  StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs)
          .withFilePath(path)
          .withMaxKeyCount(2000)
          .withFileContext(meta)
          .build();
  writer.close();

  HStoreFile storeFile =
      new HStoreFile(fs, writer.getPath(), conf, cacheConf, BloomType.NONE, true);
  storeFile.initReader();
  StoreFileReader reader = storeFile.getReader();

  Map<byte[], byte[]> fileInfo = reader.loadFileInfo();
  byte[] value = fileInfo.get(HFileDataBlockEncoder.DATA_BLOCK_ENCODING);
  // NOTE(review): assertEquals on byte[] compares identity, not contents —
  // consider assertArrayEquals.
  assertEquals(dataBlockEncoderAlgo.getNameInBytes(), value);
}
项目:PyroDB    文件:TestStoreFile.java   
/**
 * Check if data block encoding information is saved correctly in HFile's
 * file info.
 */
public void testDataBlockEncodingMetaData() throws IOException {
  // Make up a directory hierarchy that has a regiondir ("7e0102") and familyname.
  Path dir = new Path(new Path(this.testDir, "7e0102"), "familyname");
  Path path = new Path(dir, "1234567890");

  // The encoding is supplied through the HFileContext; the unused
  // HFileDataBlockEncoderImpl local previously created here was dead code
  // and has been removed.
  DataBlockEncoding dataBlockEncoderAlgo = DataBlockEncoding.FAST_DIFF;
  cacheConf = new CacheConfig(conf);
  HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL)
      .withChecksumType(CKTYPE)
      .withBytesPerCheckSum(CKBYTES)
      .withDataBlockEncoding(dataBlockEncoderAlgo)
      .build();
  // Make a store file and write data to it.
  StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, cacheConf, this.fs)
          .withFilePath(path)
          .withMaxKeyCount(2000)
          .withFileContext(meta)
          .build();
  writer.close();

  StoreFile storeFile = new StoreFile(fs, writer.getPath(), conf,
    cacheConf, BloomType.NONE);
  StoreFile.Reader reader = storeFile.createReader();

  Map<byte[], byte[]> fileInfo = reader.loadFileInfo();
  byte[] value = fileInfo.get(HFileDataBlockEncoder.DATA_BLOCK_ENCODING);
  // NOTE(review): assertEquals on byte[] compares identity, not contents —
  // consider assertArrayEquals.
  assertEquals(dataBlockEncoderAlgo.getNameInBytes(), value);
}
项目:c5    文件:TestStoreFile.java   
/**
 * Verifies that the data block encoding used to write a store file is saved
 * in the HFile's file-info map under DATA_BLOCK_ENCODING.
 */
public void testDataBlockEncodingMetaData() throws IOException {
  // Fake region directory ("7e0102") with a family directory underneath.
  Path dir = new Path(new Path(this.testDir, "7e0102"), "familyname");
  Path path = new Path(dir, "1234567890");

  // FAST_DIFF encoding, wrapped in the encoder handed to the writer.
  DataBlockEncoding encodingAlgo = DataBlockEncoding.FAST_DIFF;
  HFileDataBlockEncoder encoder = new HFileDataBlockEncoderImpl(encodingAlgo);
  cacheConf = new CacheConfig(conf);

  // Write an (empty) store file configured with that encoder.
  StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, cacheConf, fs,
      HConstants.DEFAULT_BLOCKSIZE)
          .withFilePath(path)
          .withDataBlockEncoder(encoder)
          .withMaxKeyCount(2000)
          .withChecksumType(CKTYPE)
          .withBytesPerChecksum(CKBYTES)
          .build();
  writer.close();

  // Re-open the file and pull the encoding back out of the file info.
  StoreFile storeFile = new StoreFile(fs, writer.getPath(), conf,
      cacheConf, BloomType.NONE);
  StoreFile.Reader reader = storeFile.createReader();
  Map<byte[], byte[]> fileInfo = reader.loadFileInfo();
  byte[] value = fileInfo.get(HFileDataBlockEncoder.DATA_BLOCK_ENCODING);
  assertEquals(encodingAlgo.getNameInBytes(), value);
}
项目:HBase-Research    文件:TestStoreFile.java   
/**
 * Check if data block encoding information is saved correctly in HFile's
 * file info.
 */
public void testDataBlockEncodingMetaData() throws IOException {
  // Make up a directory hierarchy that has a regiondir ("7e0102") and familyname.
  Path dir = new Path(new Path(this.testDir, "7e0102"), "familyname");
  Path path = new Path(dir, "1234567890");

  // FAST_DIFF both on disk and in cache.
  DataBlockEncoding dataBlockEncoderAlgo =
      DataBlockEncoding.FAST_DIFF;
  HFileDataBlockEncoder dataBlockEncoder =
      new HFileDataBlockEncoderImpl(
          dataBlockEncoderAlgo,
          dataBlockEncoderAlgo);
  cacheConf = new CacheConfig(conf);
  // Write an (empty) store file with the encoder configured.
  StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, cacheConf, fs,
      HFile.DEFAULT_BLOCKSIZE)
          .withFilePath(path)
          .withDataBlockEncoder(dataBlockEncoder)
          .withMaxKeyCount(2000)
          .withChecksumType(CKTYPE)
          .withBytesPerChecksum(CKBYTES)
          .build();
  writer.close();

  // Re-open the file and read the encoding back out of the file info.
  StoreFile storeFile = new StoreFile(fs, writer.getPath(), conf,
      cacheConf, BloomType.NONE, dataBlockEncoder);
  StoreFile.Reader reader = storeFile.createReader();

  Map<byte[], byte[]> fileInfo = reader.loadFileInfo();
  byte[] value = fileInfo.get(HFileDataBlockEncoder.DATA_BLOCK_ENCODING);

  // NOTE(review): assertEquals on byte[] compares object identity, not
  // contents — consider assertArrayEquals.
  assertEquals(dataBlockEncoderAlgo.getNameInBytes(), value);
}
项目:hbase-0.94.8-qod    文件:TestStoreFile.java   
/**
 * Check if data block encoding information is saved correctly in HFile's
 * file info.
 */
public void testDataBlockEncodingMetaData() throws IOException {
  // Make up a directory hierarchy that has a regiondir ("7e0102") and familyname.
  Path dir = new Path(new Path(this.testDir, "7e0102"), "familyname");
  Path path = new Path(dir, "1234567890");

  // FAST_DIFF both on disk and in cache.
  DataBlockEncoding dataBlockEncoderAlgo =
      DataBlockEncoding.FAST_DIFF;
  HFileDataBlockEncoder dataBlockEncoder =
      new HFileDataBlockEncoderImpl(
          dataBlockEncoderAlgo,
          dataBlockEncoderAlgo);
  cacheConf = new CacheConfig(conf);
  // Write an (empty) store file with the encoder configured.
  StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, cacheConf, fs,
      HFile.DEFAULT_BLOCKSIZE)
          .withFilePath(path)
          .withDataBlockEncoder(dataBlockEncoder)
          .withMaxKeyCount(2000)
          .withChecksumType(CKTYPE)
          .withBytesPerChecksum(CKBYTES)
          .build();
  writer.close();

  // Re-open the file and read the encoding back out of the file info.
  StoreFile storeFile = new StoreFile(fs, writer.getPath(), conf,
      cacheConf, BloomType.NONE, dataBlockEncoder);
  StoreFile.Reader reader = storeFile.createReader();

  Map<byte[], byte[]> fileInfo = reader.loadFileInfo();
  byte[] value = fileInfo.get(HFileDataBlockEncoder.DATA_BLOCK_ENCODING);

  // NOTE(review): assertEquals on byte[] compares object identity, not
  // contents — consider assertArrayEquals.
  assertEquals(dataBlockEncoderAlgo.getNameInBytes(), value);
}
项目:hbase-0.94.8-qod    文件:TestStoreFile.java   
/**
 * Check if data block encoding information is saved correctly in HFile's
 * file info.
 *
 * <p>Writes a store file with FAST_DIFF encoding (both on disk and in cache),
 * reopens it, and verifies the DATA_BLOCK_ENCODING entry persisted in the
 * HFile's file-info map.
 *
 * @throws IOException if the store file cannot be written or read back
 */
public void testDataBlockEncodingMetaData() throws IOException {
  // Make up a directory hierarchy that has a regiondir ("7e0102") and familyname.
  Path dir = new Path(new Path(this.testDir, "7e0102"), "familyname");
  Path path = new Path(dir, "1234567890");

  DataBlockEncoding dataBlockEncoderAlgo =
      DataBlockEncoding.FAST_DIFF;
  HFileDataBlockEncoder dataBlockEncoder =
      new HFileDataBlockEncoderImpl(
          dataBlockEncoderAlgo,
          dataBlockEncoderAlgo);
  cacheConf = new CacheConfig(conf);
  StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, cacheConf, fs,
      HFile.DEFAULT_BLOCKSIZE)
          .withFilePath(path)
          .withDataBlockEncoder(dataBlockEncoder)
          .withMaxKeyCount(2000)
          .withChecksumType(CKTYPE)
          .withBytesPerChecksum(CKBYTES)
          .build();
  writer.close();

  StoreFile storeFile = new StoreFile(fs, writer.getPath(), conf,
      cacheConf, BloomType.NONE, dataBlockEncoder);
  StoreFile.Reader reader = storeFile.createReader();

  Map<byte[], byte[]> fileInfo = reader.loadFileInfo();
  byte[] value = fileInfo.get(HFileDataBlockEncoder.DATA_BLOCK_ENCODING);

  // Compare array contents, not references: assertEquals(Object, Object) on
  // two byte[] uses identity equality and would fail even for equal bytes.
  assertTrue(java.util.Arrays.equals(dataBlockEncoderAlgo.getNameInBytes(), value));
}
项目:DominoHBase    文件:TestStoreFile.java   
/**
 * Check if data block encoding information is saved correctly in HFile's
 * file info.
 *
 * <p>Writes a store file with FAST_DIFF encoding (both on disk and in cache),
 * reopens it, and verifies the DATA_BLOCK_ENCODING entry persisted in the
 * HFile's file-info map.
 *
 * @throws IOException if the store file cannot be written or read back
 */
public void testDataBlockEncodingMetaData() throws IOException {
  Path dir = new Path(new Path(this.testDir, "regionname"), "familyname");
  Path path = new Path(dir, "1234567890");

  DataBlockEncoding dataBlockEncoderAlgo =
      DataBlockEncoding.FAST_DIFF;
  HFileDataBlockEncoder dataBlockEncoder =
      new HFileDataBlockEncoderImpl(
          dataBlockEncoderAlgo,
          dataBlockEncoderAlgo);
  cacheConf = new CacheConfig(conf);
  StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, cacheConf, fs,
      HFile.DEFAULT_BLOCKSIZE)
          .withFilePath(path)
          .withDataBlockEncoder(dataBlockEncoder)
          .withMaxKeyCount(2000)
          .withChecksumType(CKTYPE)
          .withBytesPerChecksum(CKBYTES)
          .build();
  writer.close();

  StoreFile storeFile = new StoreFile(fs, writer.getPath(), conf,
      cacheConf, BloomType.NONE, dataBlockEncoder);
  StoreFile.Reader reader = storeFile.createReader();

  Map<byte[], byte[]> fileInfo = reader.loadFileInfo();
  byte[] value = fileInfo.get(HFileDataBlockEncoder.DATA_BLOCK_ENCODING);

  // Compare array contents, not references: assertEquals(Object, Object) on
  // two byte[] uses identity equality and would fail even for equal bytes.
  assertTrue(java.util.Arrays.equals(dataBlockEncoderAlgo.getNameInBytes(), value));
}
项目:hindex    文件:TestStoreFile.java   
/**
 * Check if data block encoding information is saved correctly in HFile's
 * file info.
 *
 * <p>Writes a store file with FAST_DIFF encoding (both on disk and in cache),
 * reopens it, and verifies the DATA_BLOCK_ENCODING entry persisted in the
 * HFile's file-info map.
 *
 * @throws IOException if the store file cannot be written or read back
 */
public void testDataBlockEncodingMetaData() throws IOException {
  // Make up a directory hierarchy that has a regiondir ("7e0102") and familyname.
  Path dir = new Path(new Path(this.testDir, "7e0102"), "familyname");
  Path path = new Path(dir, "1234567890");

  DataBlockEncoding dataBlockEncoderAlgo =
      DataBlockEncoding.FAST_DIFF;
  HFileDataBlockEncoder dataBlockEncoder =
      new HFileDataBlockEncoderImpl(
          dataBlockEncoderAlgo,
          dataBlockEncoderAlgo);
  cacheConf = new CacheConfig(conf);
  StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, cacheConf, fs,
      HFile.DEFAULT_BLOCKSIZE)
          .withFilePath(path)
          .withDataBlockEncoder(dataBlockEncoder)
          .withMaxKeyCount(2000)
          .withChecksumType(CKTYPE)
          .withBytesPerChecksum(CKBYTES)
          .build();
  writer.close();

  StoreFile storeFile = new StoreFile(fs, writer.getPath(), conf,
      cacheConf, BloomType.NONE, dataBlockEncoder);
  StoreFile.Reader reader = storeFile.createReader();

  Map<byte[], byte[]> fileInfo = reader.loadFileInfo();
  byte[] value = fileInfo.get(HFileDataBlockEncoder.DATA_BLOCK_ENCODING);

  // Compare array contents, not references: assertEquals(Object, Object) on
  // two byte[] uses identity equality and would fail even for equal bytes.
  assertTrue(java.util.Arrays.equals(dataBlockEncoderAlgo.getNameInBytes(), value));
}
项目:c5    文件:HStore.java   
/**
 * Constructor. Sets up the on-disk store directory, layers the effective
 * configuration (global config, then table overrides, then column-family
 * overrides, then CF metadata), and initializes the memstore, cache
 * configuration, store engine and compaction-related settings for this
 * column family.
 *
 * @param region the region this store belongs to
 * @param family HColumnDescriptor for this column family
 * @param confParam configuration object; refined here by table- and
 *          family-level overrides before use
 * @throws IOException if the store directory cannot be created or existing
 *          store files cannot be loaded
 */
protected HStore(final HRegion region, final HColumnDescriptor family,
    final Configuration confParam) throws IOException {

  HRegionInfo info = region.getRegionInfo();
  this.fs = region.getRegionFileSystem();

  // Assemble the store's home directory and ensure it exists.
  fs.createStoreDir(family.getNameAsString());
  this.region = region;
  this.family = family;
  // 'conf' renamed to 'confParam' b/c we use this.conf in the constructor
  // CompoundConfiguration will look for keys in reverse order of addition, so we'd
  // add global config first, then table and cf overrides, then cf metadata.
  this.conf = new CompoundConfiguration()
    .add(confParam)
    .addStringMap(region.getTableDesc().getConfiguration())
    .addStringMap(family.getConfiguration())
    .addWritableMap(family.getValues());
  this.blocksize = family.getBlocksize();

  // Encoder applied to data blocks written for this family's HFiles.
  this.dataBlockEncoder =
      new HFileDataBlockEncoderImpl(family.getDataBlockEncoding());

  this.comparator = info.getComparator();
  // used by ScanQueryMatcher
  long timeToPurgeDeletes =
      Math.max(conf.getLong("hbase.hstore.time.to.purge.deletes", 0), 0);
  LOG.trace("Time to purge deletes set to " + timeToPurgeDeletes +
      "ms in store " + this);
  // Get TTL
  long ttl = determineTTLFromFamily(family);
  // Why not just pass a HColumnDescriptor in here altogether?  Even if have
  // to clone it?
  scanInfo = new ScanInfo(family, ttl, timeToPurgeDeletes, this.comparator);
  this.memstore = new MemStore(conf, this.comparator);
  this.offPeakHours = OffPeakHours.getInstance(conf);

  // Setting up cache configuration for this family
  this.cacheConf = new CacheConfig(conf, family);

  this.verifyBulkLoads = conf.getBoolean("hbase.hstore.bulkload.verify", false);

  this.blockingFileCount =
      conf.getInt(BLOCKING_STOREFILES_KEY, DEFAULT_BLOCKING_STOREFILE_COUNT);
  this.compactionCheckMultiplier = conf.getInt(
      COMPACTCHECKER_INTERVAL_MULTIPLIER_KEY, DEFAULT_COMPACTCHECKER_INTERVAL_MULTIPLIER);
  // Guard against a misconfigured (non-positive) multiplier.
  if (this.compactionCheckMultiplier <= 0) {
    LOG.error("Compaction check period multiplier must be positive, setting default: "
        + DEFAULT_COMPACTCHECKER_INTERVAL_MULTIPLIER);
    this.compactionCheckMultiplier = DEFAULT_COMPACTCHECKER_INTERVAL_MULTIPLIER;
  }

  // Static field: initialized only once across all HStore instances.
  if (HStore.closeCheckInterval == 0) {
    HStore.closeCheckInterval = conf.getInt(
        "hbase.hstore.close.check.interval", 10*1000*1000 /* 10 MB */);
  }

  // Create the store engine and load the store files already on disk.
  this.storeEngine = StoreEngine.create(this, this.conf, this.comparator);
  this.storeEngine.getStoreFileManager().loadFiles(loadStoreFiles());

  // Initialize checksum type from name. The names are CRC32, CRC32C, etc.
  this.checksumType = getChecksumType(conf);
  // initilize bytes per checksum
  this.bytesPerChecksum = getBytesPerChecksum(conf);
  flushRetriesNumber = conf.getInt(
      "hbase.hstore.flush.retries.number", DEFAULT_FLUSH_RETRIES_NUMBER);
  pauseTime = conf.getInt(HConstants.HBASE_SERVER_PAUSE, HConstants.DEFAULT_HBASE_SERVER_PAUSE);
  // A non-positive retry count is a configuration error; fail fast.
  if (flushRetriesNumber <= 0) {
    throw new IllegalArgumentException(
        "hbase.hstore.flush.retries.number must be > 0, not "
            + flushRetriesNumber);
  }
}
项目:DominoHBase    文件:HStore.java   
/**
 * Constructor. Creates the store's home directory under the region dir,
 * builds the effective configuration (global config overlaid with
 * column-family values), and initializes the memstore, cache configuration,
 * store file list and compaction machinery for this column family.
 *
 * @param basedir qualified path under which the region directory lives;
 *          generally the table subdirectory
 * @param region the region this store belongs to
 * @param family HColumnDescriptor for this column family
 * @param fs file system object
 * @param confParam configuration object; refined here by family-level values
 * @throws IOException if the store home directory cannot be created or
 *          existing store files cannot be loaded
 */
protected HStore(Path basedir, HRegion region, HColumnDescriptor family,
    FileSystem fs, Configuration confParam)
throws IOException {

  HRegionInfo info = region.getRegionInfo();
  this.fs = fs;
  // Assemble the store's home directory.
  Path p = getStoreHomedir(basedir, info.getEncodedName(), family.getName());
  // Ensure it exists.
  this.homedir = createStoreHomeDir(this.fs, p);
  this.region = region;
  this.family = family;
  // 'conf' renamed to 'confParam' b/c we use this.conf in the constructor
  this.conf = new CompoundConfiguration()
    .add(confParam)
    .add(family.getValues());
  this.blocksize = family.getBlocksize();

  // On-disk and in-cache data block encodings can differ in this version.
  this.dataBlockEncoder =
      new HFileDataBlockEncoderImpl(family.getDataBlockEncodingOnDisk(),
          family.getDataBlockEncoding());

  this.comparator = info.getComparator();
  // Get TTL
  this.ttl = determineTTLFromFamily(family);
  // used by ScanQueryMatcher
  long timeToPurgeDeletes =
      Math.max(conf.getLong("hbase.hstore.time.to.purge.deletes", 0), 0);
  LOG.trace("Time to purge deletes set to " + timeToPurgeDeletes +
      "ms in store " + this);
  // Why not just pass a HColumnDescriptor in here altogether?  Even if have
  // to clone it?
  scanInfo = new ScanInfo(family, ttl, timeToPurgeDeletes, this.comparator);
  this.memstore = new MemStore(conf, this.comparator);

  // Setting up cache configuration for this family
  this.cacheConf = new CacheConfig(conf, family);
  this.blockingStoreFileCount =
    conf.getInt("hbase.hstore.blockingStoreFiles", 7);

  this.verifyBulkLoads = conf.getBoolean("hbase.hstore.bulkload.verify", false);

  // Static field: initialized only once across all HStore instances.
  if (HStore.closeCheckInterval == 0) {
    HStore.closeCheckInterval = conf.getInt(
        "hbase.hstore.close.check.interval", 10*1000*1000 /* 10 MB */);
  }
  // Load the store files already on disk, sorted.
  this.storefiles = sortAndClone(loadStoreFiles());

  // Initialize checksum type from name. The names are CRC32, CRC32C, etc.
  this.checksumType = getChecksumType(conf);
  // initilize bytes per checksum
  this.bytesPerChecksum = getBytesPerChecksum(conf);
  // Create a compaction tool instance
  this.compactor = new Compactor(conf);
  // Create a compaction manager.
  this.compactionPolicy = new CompactionPolicy(conf, this);
}