@Override
public InternalScanner preFlushScannerOpen(
    final ObserverContext<RegionCoprocessorEnvironment> c, Store store,
    KeyValueScanner memstoreScanner, InternalScanner s) throws IOException {
  // Apply the per-table TTL/max-versions overrides (from the test's ttls/versions maps)
  // when building the scanner used for this flush.
  Long newTtl = ttls.get(store.getTableName());
  if (newTtl != null) {
    System.out.println("PreFlush:" + newTtl);
  }
  Integer newVersions = versions.get(store.getTableName());
  ScanInfo oldSI = store.getScanInfo();
  HColumnDescriptor family = store.getFamily();
  ScanInfo scanInfo = new ScanInfo(TEST_UTIL.getConfiguration(), family.getName(),
      family.getMinVersions(), newVersions == null ? family.getMaxVersions() : newVersions,
      newTtl == null ? oldSI.getTtl() : newTtl, family.getKeepDeletedCells(),
      oldSI.getTimeToPurgeDeletes(), oldSI.getComparator());
  Scan scan = new Scan();
  scan.setMaxVersions(newVersions == null ? oldSI.getMaxVersions() : newVersions);
  return new StoreScanner(store, scanInfo, scan, Collections.singletonList(memstoreScanner),
      ScanType.COMPACT_RETAIN_DELETES, store.getSmallestReadPoint(),
      HConstants.OLDEST_TIMESTAMP);
}
@Override
public InternalScanner preCompactScannerOpen(
    final ObserverContext<RegionCoprocessorEnvironment> c, Store store,
    List<? extends KeyValueScanner> scanners, ScanType scanType, long earliestPutTs,
    InternalScanner s) throws IOException {
  // Apply the same per-table overrides when opening the compaction scanner.
  Long newTtl = ttls.get(store.getTableName());
  Integer newVersions = versions.get(store.getTableName());
  ScanInfo oldSI = store.getScanInfo();
  HColumnDescriptor family = store.getFamily();
  ScanInfo scanInfo = new ScanInfo(TEST_UTIL.getConfiguration(), family.getName(),
      family.getMinVersions(), newVersions == null ? family.getMaxVersions() : newVersions,
      newTtl == null ? oldSI.getTtl() : newTtl, family.getKeepDeletedCells(),
      oldSI.getTimeToPurgeDeletes(), oldSI.getComparator());
  Scan scan = new Scan();
  scan.setMaxVersions(newVersions == null ? oldSI.getMaxVersions() : newVersions);
  return new StoreScanner(store, scanInfo, scan, scanners, scanType,
      store.getSmallestReadPoint(), earliestPutTs);
}
@Override
public KeyValueScanner preStoreScannerOpen(
    final ObserverContext<RegionCoprocessorEnvironment> c, Store store, final Scan scan,
    final NavigableSet<byte[]> targetCols, KeyValueScanner s) throws IOException {
  // Apply the overrides for user scans, but leave system tables on the default scanner.
  TableName tn = store.getTableName();
  if (!tn.isSystemTable()) {
    Long newTtl = ttls.get(store.getTableName());
    Integer newVersions = versions.get(store.getTableName());
    ScanInfo oldSI = store.getScanInfo();
    HColumnDescriptor family = store.getFamily();
    ScanInfo scanInfo = new ScanInfo(TEST_UTIL.getConfiguration(), family.getName(),
        family.getMinVersions(), newVersions == null ? family.getMaxVersions() : newVersions,
        newTtl == null ? oldSI.getTtl() : newTtl, family.getKeepDeletedCells(),
        oldSI.getTimeToPurgeDeletes(), oldSI.getComparator());
    return new StoreScanner(store, scanInfo, scan, targetCols,
        ((HStore) store).getHRegion().getReadpoint(IsolationLevel.READ_COMMITTED));
  } else {
    return s;
  }
}
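The three scan-policy hooks above only take effect once the observer is registered against a table. A minimal registration sketch, assuming the enclosing observer class is named ScanObserver and is on the region server classpath (both names are illustrative, not taken from the snippets above):

// Hypothetical registration of the scan-policy observer on a new table.
HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("scan_policy_table"));
htd.addFamily(new HColumnDescriptor("f"));
htd.addCoprocessor(ScanObserver.class.getName()); // assumed observer class name
TEST_UTIL.getHBaseAdmin().createTable(htd);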
@Test
public void testPurgeExpiredFiles() throws Exception {
  Configuration conf = TEST_UTIL.getConfiguration();
  conf.setInt(HStore.BLOCKING_STOREFILES_KEY, 10000);
  TEST_UTIL.startMiniCluster(1);
  try {
    Store store = prepareData();
    assertEquals(10, store.getStorefilesCount());
    TEST_UTIL.getHBaseAdmin().majorCompact(tableName);
    while (store.getStorefilesCount() > 1) {
      Thread.sleep(100);
    }
    assertTrue(store.getStorefilesCount() == 1);
  } finally {
    TEST_UTIL.shutdownMiniCluster();
  }
}
private Store prepareData() throws IOException {
  HBaseAdmin admin = TEST_UTIL.getHBaseAdmin();
  if (admin.tableExists(tableName)) {
    admin.disableTable(tableName);
    admin.deleteTable(tableName);
  }
  HTable table = TEST_UTIL.createTable(tableName, family);
  Random rand = new Random();
  for (int i = 0; i < 10; i++) {
    for (int j = 0; j < 10; j++) {
      byte[] value = new byte[128 * 1024];
      rand.nextBytes(value);
      table.put(new Put(Bytes.toBytes(i * 10 + j)).add(family, qualifier, value));
    }
    admin.flush(tableName);
  }
  return getStoreWithName(tableName);
}
private long testCompactionWithoutThroughputLimit() throws Exception {
  Configuration conf = TEST_UTIL.getConfiguration();
  conf.set(StoreEngine.STORE_ENGINE_CLASS_KEY, DefaultStoreEngine.class.getName());
  conf.setInt(CompactionConfiguration.HBASE_HSTORE_COMPACTION_MIN_KEY, 100);
  conf.setInt(CompactionConfiguration.HBASE_HSTORE_COMPACTION_MAX_KEY, 200);
  conf.setInt(HStore.BLOCKING_STOREFILES_KEY, 10000);
  conf.set(CompactionThroughputControllerFactory.HBASE_THROUGHPUT_CONTROLLER_KEY,
      NoLimitCompactionThroughputController.class.getName());
  TEST_UTIL.startMiniCluster(1);
  try {
    Store store = prepareData();
    assertEquals(10, store.getStorefilesCount());
    long startTime = System.currentTimeMillis();
    TEST_UTIL.getHBaseAdmin().majorCompact(tableName);
    while (store.getStorefilesCount() != 1) {
      Thread.sleep(20);
    }
    return System.currentTimeMillis() - startTime;
  } finally {
    TEST_UTIL.shutdownMiniCluster();
  }
}
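For comparison, a throughput-limited run differs only in the controller configuration. A sketch under the assumption that PressureAwareCompactionThroughputController and the HBase 1.x bound keys are available (verify the key names against your HBase version):

// Sketch: same scaffold as the method above, but with a rate-limited compaction controller.
Configuration conf = TEST_UTIL.getConfiguration();
conf.set(CompactionThroughputControllerFactory.HBASE_THROUGHPUT_CONTROLLER_KEY,
    PressureAwareCompactionThroughputController.class.getName());
// Key names assumed from the 1.x defaults.
conf.setLong("hbase.hstore.compaction.throughput.higher.bound", 10L * 1024 * 1024);
conf.setLong("hbase.hstore.compaction.throughput.lower.bound", 5L * 1024 * 1024);
// ...then start the mini cluster, call prepareData(), major compact, and time the result
// exactly as in testCompactionWithoutThroughputLimit() above.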
@Override
public InternalScanner preCompactScannerOpen(ObserverContext<RegionCoprocessorEnvironment> c,
    Store store, List<? extends KeyValueScanner> scanners, ScanType scanType,
    long earliestPutTs, InternalScanner s, CompactionRequest request) throws IOException {
  // Get the latest tx snapshot state for the compaction
  TransactionVisibilityState snapshot = cache.getLatestState();
  // Record tx state before the compaction
  if (compactionState != null) {
    compactionState.record(request, snapshot);
  }
  // Also make sure to use the same snapshot for the compaction
  return createStoreScanner(c.getEnvironment(), "compaction", snapshot, store, scanners,
      scanType, earliestPutTs);
}
protected InternalScanner createStoreScanner(RegionCoprocessorEnvironment env, String action,
    TransactionVisibilityState snapshot, Store store,
    List<? extends KeyValueScanner> scanners, ScanType type, long earliestPutTs)
    throws IOException {
  if (snapshot == null) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("Region " + env.getRegion().getRegionInfo().getRegionNameAsString()
          + ", no current transaction state found, defaulting to normal " + action + " scanner");
    }
    return null;
  }
  // construct a dummy transaction from the latest snapshot
  Transaction dummyTx = TxUtils.createDummyTransaction(snapshot);
  Scan scan = new Scan();
  // need to see all versions, since we filter out excludes and applications may rely on
  // multiple versions
  scan.setMaxVersions();
  scan.setFilter(new IncludeInProgressFilter(dummyTx.getVisibilityUpperBound(),
      snapshot.getInvalid(), getTransactionFilter(dummyTx, type, null)));
  return new StoreScanner(store, store.getScanInfo(), scan, scanners, type,
      store.getSmallestReadPoint(), earliestPutTs);
}
protected InternalScanner createStoreScanner(RegionCoprocessorEnvironment env, String action,
    TransactionVisibilityState snapshot, Store store,
    List<? extends KeyValueScanner> scanners, ScanType type, long earliestPutTs)
    throws IOException {
  if (snapshot == null) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("Region " + env.getRegion().getRegionNameAsString()
          + ", no current transaction state found, defaulting to normal " + action + " scanner");
    }
    return null;
  }
  // construct a dummy transaction from the latest snapshot
  Transaction dummyTx = TxUtils.createDummyTransaction(snapshot);
  Scan scan = new Scan();
  // need to see all versions, since we filter out excludes and applications may rely on
  // multiple versions
  scan.setMaxVersions();
  scan.setFilter(new IncludeInProgressFilter(dummyTx.getVisibilityUpperBound(),
      snapshot.getInvalid(), getTransactionFilter(dummyTx, type, null)));
  return new StoreScanner(store, store.getScanInfo(), scan, scanners, type,
      store.getSmallestReadPoint(), earliestPutTs);
}
@Override
public InternalScanner preFlushScannerOpen(
    final ObserverContext<RegionCoprocessorEnvironment> c, Store store,
    KeyValueScanner memstoreScanner, InternalScanner s) throws IOException {
  Long newTtl = ttls.get(store.getTableName());
  if (newTtl != null) {
    System.out.println("PreFlush:" + newTtl);
  }
  Integer newVersions = versions.get(store.getTableName());
  ScanInfo oldSI = store.getScanInfo();
  HColumnDescriptor family = store.getFamily();
  ScanInfo scanInfo = new ScanInfo(family.getName(), family.getMinVersions(),
      newVersions == null ? family.getMaxVersions() : newVersions,
      newTtl == null ? oldSI.getTtl() : newTtl, family.getKeepDeletedCells(),
      oldSI.getTimeToPurgeDeletes(), oldSI.getComparator());
  Scan scan = new Scan();
  scan.setMaxVersions(newVersions == null ? oldSI.getMaxVersions() : newVersions);
  return new StoreScanner(store, scanInfo, scan, Collections.singletonList(memstoreScanner),
      ScanType.COMPACT_RETAIN_DELETES, store.getSmallestReadPoint(),
      HConstants.OLDEST_TIMESTAMP);
}
@Override
public KeyValueScanner preStoreScannerOpen(
    final ObserverContext<RegionCoprocessorEnvironment> c, Store store, final Scan scan,
    final NavigableSet<byte[]> targetCols, KeyValueScanner s) throws IOException {
  TableName tn = store.getTableName();
  if (!tn.isSystemTable()) {
    Long newTtl = ttls.get(store.getTableName());
    Integer newVersions = versions.get(store.getTableName());
    ScanInfo oldSI = store.getScanInfo();
    HColumnDescriptor family = store.getFamily();
    ScanInfo scanInfo = new ScanInfo(family.getName(), family.getMinVersions(),
        newVersions == null ? family.getMaxVersions() : newVersions,
        newTtl == null ? oldSI.getTtl() : newTtl, family.getKeepDeletedCells(),
        oldSI.getTimeToPurgeDeletes(), oldSI.getComparator());
    return new StoreScanner(store, scanInfo, scan, targetCols,
        ((HStore) store).getHRegion().getReadpoint(IsolationLevel.READ_COMMITTED));
  } else {
    return s;
  }
}
@Override
public InternalScanner preFlushScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> c,
    Store store, KeyValueScanner memstoreScanner, InternalScanner s) throws IOException {
  Long newTtl = ttls.get(store.getTableName());
  if (newTtl != null) {
    System.out.println("PreFlush:" + newTtl);
  }
  Integer newVersions = versions.get(store.getTableName());
  Store.ScanInfo oldSI = store.getScanInfo();
  HColumnDescriptor family = store.getFamily();
  Store.ScanInfo scanInfo = new Store.ScanInfo(family.getName(), family.getMinVersions(),
      newVersions == null ? family.getMaxVersions() : newVersions,
      newTtl == null ? oldSI.getTtl() : newTtl, family.getKeepDeletedCells(),
      oldSI.getTimeToPurgeDeletes(), oldSI.getComparator());
  Scan scan = new Scan();
  scan.setMaxVersions(newVersions == null ? oldSI.getMaxVersions() : newVersions);
  return new StoreScanner(store, scanInfo, scan, Collections.singletonList(memstoreScanner),
      ScanType.MINOR_COMPACT, store.getHRegion().getSmallestReadPoint(),
      HConstants.OLDEST_TIMESTAMP);
}
@Override
public InternalScanner preCompact(ObserverContext<RegionCoprocessorEnvironment> e,
    final Store store, final InternalScanner scanner, final ScanType scanType)
    throws IOException {
  requirePermission("compact", getTableName(e.getEnvironment()), null, null, Action.ADMIN,
      Action.CREATE);
  return scanner;
}
@Override
public InternalScanner preCompactScannerOpen(
    final ObserverContext<RegionCoprocessorEnvironment> c, final Store store,
    List<? extends KeyValueScanner> scanners, final ScanType scanType,
    final long earliestPutTs, final InternalScanner s) throws IOException {
  return s;
}
@Override
public InternalScanner preCompactScannerOpen(
    final ObserverContext<RegionCoprocessorEnvironment> c, final Store store,
    List<? extends KeyValueScanner> scanners, final ScanType scanType,
    final long earliestPutTs, final InternalScanner s, CompactionRequest request)
    throws IOException {
  return preCompactScannerOpen(c, store, scanners, scanType, earliestPutTs, s);
}
/**
 * Creates a 'manifest' for the specified region, by reading directly from the HRegion object.
 * This is used by the "online snapshot" when the table is enabled.
 */
public void addRegion(final HRegion region) throws IOException {
  // 0. Get the ManifestBuilder/RegionVisitor
  RegionVisitor visitor = createRegionVisitor(desc);

  // 1. dump region meta info into the snapshot directory
  LOG.debug("Storing '" + region + "' region-info for snapshot.");
  Object regionData = visitor.regionOpen(region.getRegionInfo());
  monitor.rethrowException();

  // 2. iterate through all the stores in the region
  LOG.debug("Creating references for hfiles");
  for (Store store : region.getStores()) {
    // 2.1. build the snapshot reference for the store
    Object familyData = visitor.familyOpen(regionData, store.getFamily().getName());
    monitor.rethrowException();

    List<StoreFile> storeFiles = new ArrayList<StoreFile>(store.getStorefiles());
    if (LOG.isDebugEnabled()) {
      LOG.debug("Adding snapshot references for " + storeFiles + " hfiles");
    }

    // 2.2. iterate through all the store's files and create "references".
    for (int i = 0, sz = storeFiles.size(); i < sz; i++) {
      StoreFile storeFile = storeFiles.get(i);
      monitor.rethrowException();

      // create "reference" to this store file.
      LOG.debug("Adding reference for file (" + (i + 1) + "/" + sz + "): "
          + storeFile.getPath());
      visitor.storeFile(regionData, familyData, storeFile.getFileInfo());
    }
    visitor.familyClose(regionData, familyData);
  }
  visitor.regionClose(regionData);
}
Compactor(final Configuration conf, final Store store) {
  this.conf = conf;
  this.store = store;
  this.compactionKVMax = this.conf.getInt(HConstants.COMPACTION_KV_MAX,
      HConstants.COMPACTION_KV_MAX_DEFAULT);
  this.compactionCompression = (this.store.getFamily() == null)
      ? Compression.Algorithm.NONE : this.store.getFamily().getCompactionCompression();
  this.keepSeqIdPeriod = Math.max(this.conf.getInt(HConstants.KEEP_SEQID_PERIOD,
      HConstants.MIN_KEEP_SEQID_PERIOD), HConstants.MIN_KEEP_SEQID_PERIOD);
}
@Test
public void testFlushedFileWithVisibilityTags() throws Exception {
  final byte[] qual2 = Bytes.toBytes("qual2");
  TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
  HTableDescriptor desc = new HTableDescriptor(tableName);
  HColumnDescriptor col = new HColumnDescriptor(fam);
  desc.addFamily(col);
  TEST_UTIL.getHBaseAdmin().createTable(desc);
  try (Table table = TEST_UTIL.getConnection().getTable(tableName)) {
    Put p1 = new Put(row1);
    p1.add(fam, qual, value);
    p1.setCellVisibility(new CellVisibility(CONFIDENTIAL));

    Put p2 = new Put(row1);
    p2.add(fam, qual2, value);
    p2.setCellVisibility(new CellVisibility(SECRET));

    RowMutations rm = new RowMutations(row1);
    rm.add(p1);
    rm.add(p2);
    table.mutateRow(rm);
  }
  TEST_UTIL.getHBaseAdmin().flush(tableName);
  List<HRegion> regions = TEST_UTIL.getHBaseCluster().getRegions(tableName);
  Store store = regions.get(0).getStore(fam);
  Collection<StoreFile> storefiles = store.getStorefiles();
  assertTrue(storefiles.size() > 0);
  for (StoreFile storeFile : storefiles) {
    assertTrue(storeFile.getReader().getHFileReader().getFileContext().isIncludesTags());
  }
}
@Override
public boolean compact(CompactionContext compaction, Store store,
    CompactionThroughputController throughputController) throws IOException {
  try {
    return super.compact(compaction, store, throughputController);
  } finally {
    compactCount++;
  }
}
@Override
public boolean compact(CompactionContext compaction, Store store,
    CompactionThroughputController throughputController, User user) throws IOException {
  try {
    return super.compact(compaction, store, throughputController, user);
  } finally {
    compactCount++;
  }
}
public int countStoreFiles() {
  int count = 0;
  for (Store store : stores.values()) {
    count += store.getStorefilesCount();
  }
  return count;
}
private void waitForStoreFileCount(Store store, int count, int timeout)
    throws InterruptedException {
  long start = System.currentTimeMillis();
  while (start + timeout > System.currentTimeMillis()
      && store.getStorefilesCount() != count) {
    Thread.sleep(100);
  }
  System.out.println("start=" + start + ", now=" + System.currentTimeMillis() + ", cur="
      + store.getStorefilesCount());
  assertEquals(count, store.getStorefilesCount());
}
private void loadFlushAndCompact(Region region, byte[] family) throws IOException {
  // create two hfiles in the region
  createHFileInRegion(region, family);
  createHFileInRegion(region, family);

  Store s = region.getStore(family);
  int count = s.getStorefilesCount();
  assertTrue("Don't have the expected store files, wanted >= 2 store files, but was:" + count,
      count >= 2);

  // compact the two files into one file to get files in the archive
  LOG.debug("Compacting stores");
  region.compact(true);
}
@Override
public void postFlush(ObserverContext<RegionCoprocessorEnvironment> c, Store store,
    StoreFile resultFile) throws IOException {
  ctPostFlush.incrementAndGet();
  if (throwOnPostFlush.get()) {
    throw new IOException("throwOnPostFlush is true in postFlush");
  }
}
@Override
public InternalScanner preCompactScannerOpen(
    final ObserverContext<RegionCoprocessorEnvironment> c, Store store,
    List<? extends KeyValueScanner> scanners, ScanType scanType, long earliestPutTs,
    InternalScanner s) throws IOException {
  ctPreCompactScanner.incrementAndGet();
  return null;
}
@Override
public KeyValueScanner preStoreScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> c,
    final Store store, final Scan scan, final NavigableSet<byte[]> targetCols,
    final KeyValueScanner s) throws IOException {
  ctPreStoreScannerOpen.incrementAndGet();
  return null;
}
@Override
public KeyValueScanner preStoreScannerOpen(ObserverContext<RegionCoprocessorEnvironment> c,
    Store store, Scan scan, NavigableSet<byte[]> targetCols, KeyValueScanner s)
    throws IOException {
  scan.setFilter(new NoDataFilter());
  return new StoreScanner(store, store.getScanInfo(), scan, targetCols,
      ((HStore) store).getHRegion().getReadpoint(IsolationLevel.READ_COMMITTED));
}
@Override
public InternalScanner preFlushScannerOpen(ObserverContext<RegionCoprocessorEnvironment> c,
    Store store, KeyValueScanner memstoreScanner, InternalScanner s) throws IOException {
  Scan scan = new Scan();
  scan.setFilter(new NoDataFilter());
  return new StoreScanner(store, store.getScanInfo(), scan,
      Collections.singletonList(memstoreScanner), ScanType.COMPACT_RETAIN_DELETES,
      store.getSmallestReadPoint(), HConstants.OLDEST_TIMESTAMP);
}
@Override
public InternalScanner preCompactScannerOpen(ObserverContext<RegionCoprocessorEnvironment> c,
    Store store, List<? extends KeyValueScanner> scanners, ScanType scanType,
    long earliestPutTs, InternalScanner s) throws IOException {
  Scan scan = new Scan();
  scan.setFilter(new NoDataFilter());
  return new StoreScanner(store, store.getScanInfo(), scan, scanners,
      ScanType.COMPACT_RETAIN_DELETES, store.getSmallestReadPoint(),
      HConstants.OLDEST_TIMESTAMP);
}
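The NoDataFilter used by the three hooks above is not shown in these snippets. A minimal stand-in that drops every cell could look like the following sketch (an assumed implementation, not the original class):

// Hypothetical filter that skips every cell, so the wrapped scanners return no data.
public static class NoDataFilter extends FilterBase {
  @Override
  public ReturnCode filterKeyValue(Cell cell) {
    return ReturnCode.SKIP;
  }
}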