@BeforeMethod(alwaysRun = true)
public void setUp() {
  conf = new Configuration();
  conf.setIfUnset(MRJobConstants.TO_INTERMEDIATE_DATA_FORMAT,
      CSVIntermediateDataFormat.class.getName());
  jobContextMock = mock(TaskAttemptContext.class);
  GenericCounter counter = new GenericCounter("test", "test-me");
  when(((TaskAttemptContext) jobContextMock).getCounter(SqoopCounters.ROWS_WRITTEN))
      .thenReturn(counter);
  org.apache.hadoop.mapred.JobConf testConf = new org.apache.hadoop.mapred.JobConf();
  when(jobContextMock.getConfiguration()).thenReturn(testConf);
}
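// A minimal sketch (not from the source test, assuming the setUp() above has
// run): the stub hands back the same GenericCounter instance every time, so a
// test can fetch the counter through the mocked context, increment it, and
// assert on the shared value.
Counter rows = ((TaskAttemptContext) jobContextMock).getCounter(SqoopCounters.ROWS_WRITTEN);
rows.increment(5);
assertEquals(5L, rows.getValue());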
@Override
public void open() throws HyracksDataException {
  first = true;
  groupStarted = false;
  group = new ArrayList<>();
  bPtr = 0;
  group.add(new VSizeFrame(ctx));
  fta = new FrameTupleAppender();
  keyCounter = new GenericCounter();
  valueCounter = new GenericCounter();
}
public Counter() {
  this(new GenericCounter());
}
@Override
protected Counter newCounter(String counterName, String displayName, long value) {
  return new Counter(new GenericCounter(counterName, displayName, value));
}
@Override
protected Counter newCounter(String name, String displayName, long value) {
  return new GenericCounter(name, displayName, value);
}
@Override
protected Counter newCounter() {
  return new GenericCounter();
}
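// A standalone sketch of the GenericCounter API the factory methods above
// rely on: the (name, displayName, value) constructor plus the mutators
// inherited from org.apache.hadoop.mapreduce.Counter. Names and values here
// are illustrative only.
Counter c = new GenericCounter("BYTES_READ", "Bytes read", 0L);
c.increment(1024L);   // add to the running total
c.setValue(2048L);    // overwrite the total outright
System.out.println(c.getDisplayName() + " = " + c.getValue());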
@Test(timeout = 3000)
public void testLoadJobLoadReducer() throws Exception {
  LoadJob.LoadReducer test = new LoadJob.LoadReducer();

  Configuration conf = new Configuration();
  conf.setInt(JobContext.NUM_REDUCES, 2);
  CompressionEmulationUtil.setCompressionEmulationEnabled(conf, true);
  conf.setBoolean(FileOutputFormat.COMPRESS, true);
  conf.setBoolean(MRJobConfig.MAP_OUTPUT_COMPRESS, true);

  TaskAttemptID taskId = new TaskAttemptID();
  RawKeyValueIterator input = new FakeRawKeyValueIterator();
  Counter counter = new GenericCounter();
  Counter inputValueCounter = new GenericCounter();
  LoadRecordWriter output = new LoadRecordWriter();
  OutputCommitter committer = new CustomOutputCommitter();
  StatusReporter reporter = new DummyReporter();
  RawComparator<GridmixKey> comparator = new FakeRawComparator();

  ReduceContext<GridmixKey, GridmixRecord, NullWritable, GridmixRecord> reduceContext =
      new ReduceContextImpl<GridmixKey, GridmixRecord, NullWritable, GridmixRecord>(
          conf, taskId, input, counter, inputValueCounter, output, committer,
          reporter, comparator, GridmixKey.class, GridmixRecord.class);
  // advance past the first key/value pair
  reduceContext.nextKeyValue();
  org.apache.hadoop.mapreduce.Reducer<GridmixKey, GridmixRecord, NullWritable, GridmixRecord>.Context context =
      new WrappedReducer<GridmixKey, GridmixRecord, NullWritable, GridmixRecord>()
          .getReducerContext(reduceContext);
  // test.setup(context);
  test.run(context);
  // 9 records read by run() (10 total, minus the one consumed above)
  assertEquals(9, counter.getValue());
  assertEquals(10, inputValueCounter.getValue());
  assertEquals(1, output.getData().size());
  GridmixRecord record = output.getData().values().iterator().next();
  assertEquals(1593, record.getSize());
}
@Test(timeout = 3000)
public void testSleepReducer() throws Exception {
  Configuration conf = new Configuration();
  conf.setInt(JobContext.NUM_REDUCES, 2);
  CompressionEmulationUtil.setCompressionEmulationEnabled(conf, true);
  conf.setBoolean(FileOutputFormat.COMPRESS, true);
  conf.setBoolean(MRJobConfig.MAP_OUTPUT_COMPRESS, true);

  TaskAttemptID taskId = new TaskAttemptID();
  RawKeyValueIterator input = new FakeRawKeyValueReducerIterator();
  Counter counter = new GenericCounter();
  Counter inputValueCounter = new GenericCounter();
  RecordWriter<NullWritable, NullWritable> output = new LoadRecordReduceWriter();
  OutputCommitter committer = new CustomOutputCommitter();
  StatusReporter reporter = new DummyReporter();
  RawComparator<GridmixKey> comparator = new FakeRawComparator();

  ReduceContext<GridmixKey, NullWritable, NullWritable, NullWritable> reduceContext =
      new ReduceContextImpl<GridmixKey, NullWritable, NullWritable, NullWritable>(
          conf, taskId, input, counter, inputValueCounter, output, committer,
          reporter, comparator, GridmixKey.class, NullWritable.class);
  org.apache.hadoop.mapreduce.Reducer<GridmixKey, NullWritable, NullWritable, NullWritable>.Context context =
      new WrappedReducer<GridmixKey, NullWritable, NullWritable, NullWritable>()
          .getReducerContext(reduceContext);

  SleepReducer test = new SleepReducer();
  long start = System.currentTimeMillis();
  test.setup(context);
  long sleeper = context.getCurrentKey().getReduceOutputBytes();
  // the status has been updated
  assertEquals("Sleeping... " + sleeper + " ms left", context.getStatus());
  // setup() should have slept for at least `sleeper` ms
  assertTrue(System.currentTimeMillis() >= (start + sleeper));
  test.cleanup(context);
  // the status has been updated again
  assertEquals("Slept for " + sleeper, context.getStatus());
}
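// A round-trip sketch (not from the source tests): GenericCounter implements
// Writable through the Counter interface, so it can serialize through the
// standard java.io data streams. Names and values are illustrative.
ByteArrayOutputStream buffer = new ByteArrayOutputStream();
Counter original = new GenericCounter("ROWS", "Rows", 42L);
original.write(new DataOutputStream(buffer));

Counter copy = new GenericCounter();
copy.readFields(new DataInputStream(new ByteArrayInputStream(buffer.toByteArray())));
assertEquals(42L, copy.getValue());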