public ReduceContextImpl(Configuration conf, TaskAttemptID taskid,
                         RawKeyValueIterator input,
                         Counter inputKeyCounter,
                         Counter inputValueCounter,
                         RecordWriter<KEYOUT,VALUEOUT> output,
                         OutputCommitter committer,
                         StatusReporter reporter,
                         RawComparator<KEYIN> comparator,
                         Class<KEYIN> keyClass,
                         Class<VALUEIN> valueClass
                        ) throws InterruptedException, IOException {
  super(conf, taskid, output, committer, reporter);
  this.input = input;
  this.inputKeyCounter = inputKeyCounter;
  this.inputValueCounter = inputValueCounter;
  this.comparator = comparator;
  this.serializationFactory = new SerializationFactory(conf);
  // Bind the key/value deserializers to the shared input buffer.
  this.keyDeserializer = serializationFactory.getDeserializer(keyClass);
  this.keyDeserializer.open(buffer);
  this.valueDeserializer = serializationFactory.getDeserializer(valueClass);
  this.valueDeserializer.open(buffer);
  // Advance to the first key/value pair, if any.
  hasMore = input.next();
  this.keyClass = keyClass;
  this.valueClass = valueClass;
  this.conf = conf;
  this.taskid = taskid;
}
@SuppressWarnings({"rawtypes", "unchecked"}) @Test (timeout=10000) public void testLoadMapper() throws Exception { Configuration conf = new Configuration(); conf.setInt(JobContext.NUM_REDUCES, 2); CompressionEmulationUtil.setCompressionEmulationEnabled(conf, true); conf.setBoolean(MRJobConfig.MAP_OUTPUT_COMPRESS, true); TaskAttemptID taskId = new TaskAttemptID(); RecordReader<NullWritable, GridmixRecord> reader = new FakeRecordReader(); LoadRecordGkGrWriter writer = new LoadRecordGkGrWriter(); OutputCommitter committer = new CustomOutputCommitter(); StatusReporter reporter = new TaskAttemptContextImpl.DummyReporter(); LoadSplit split = getLoadSplit(); MapContext<NullWritable, GridmixRecord, GridmixKey, GridmixRecord> mapContext = new MapContextImpl<NullWritable, GridmixRecord, GridmixKey, GridmixRecord>( conf, taskId, reader, writer, committer, reporter, split); // context Context ctx = new WrappedMapper<NullWritable, GridmixRecord, GridmixKey, GridmixRecord>() .getMapContext(mapContext); reader.initialize(split, ctx); ctx.getConfiguration().setBoolean(MRJobConfig.MAP_OUTPUT_COMPRESS, true); CompressionEmulationUtil.setCompressionEmulationEnabled( ctx.getConfiguration(), true); LoadJob.LoadMapper mapper = new LoadJob.LoadMapper(); // setup, map, clean mapper.run(ctx); Map<GridmixKey, GridmixRecord> data = writer.getData(); // check result assertEquals(2, data.size()); }
public TaskInputOutputContextImpl(Configuration conf, TaskAttemptID taskid,
                                  RecordWriter<KEYOUT,VALUEOUT> output,
                                  OutputCommitter committer,
                                  StatusReporter reporter) {
  super(conf, taskid, reporter);
  this.output = output;
  this.committer = committer;
}
public TaskAttemptContextImpl(Configuration conf, TaskAttemptID taskId,
                              StatusReporter reporter) {
  super(conf, taskId.getJobID());
  this.taskId = taskId;
  this.reporter = reporter;
}
@SuppressWarnings({"unchecked", "rawtypes"}) @Test (timeout=30000) public void testSleepMapper() throws Exception { SleepJob.SleepMapper test = new SleepJob.SleepMapper(); Configuration conf = new Configuration(); conf.setInt(JobContext.NUM_REDUCES, 2); CompressionEmulationUtil.setCompressionEmulationEnabled(conf, true); conf.setBoolean(MRJobConfig.MAP_OUTPUT_COMPRESS, true); TaskAttemptID taskId = new TaskAttemptID(); FakeRecordLLReader reader = new FakeRecordLLReader(); LoadRecordGkNullWriter writer = new LoadRecordGkNullWriter(); OutputCommitter committer = new CustomOutputCommitter(); StatusReporter reporter = new TaskAttemptContextImpl.DummyReporter(); SleepSplit split = getSleepSplit(); MapContext<LongWritable, LongWritable, GridmixKey, NullWritable> mapcontext = new MapContextImpl<LongWritable, LongWritable, GridmixKey, NullWritable>( conf, taskId, reader, writer, committer, reporter, split); Context context = new WrappedMapper<LongWritable, LongWritable, GridmixKey, NullWritable>() .getMapContext(mapcontext); long start = System.currentTimeMillis(); LOG.info("start:" + start); LongWritable key = new LongWritable(start + 2000); LongWritable value = new LongWritable(start + 2000); // should slip 2 sec test.map(key, value, context); LOG.info("finish:" + System.currentTimeMillis()); assertTrue(System.currentTimeMillis() >= (start + 2000)); test.cleanup(context); assertEquals(1, writer.getData().size()); }
public MapContextImpl(Configuration conf, TaskAttemptID taskid,
                      RecordReader<KEYIN,VALUEIN> reader,
                      RecordWriter<KEYOUT,VALUEOUT> writer,
                      OutputCommitter committer,
                      StatusReporter reporter,
                      InputSplit split) {
  super(conf, taskid, writer, committer, reporter);
  this.reader = reader;
  this.split = split;
}
/**
 * Instantiates MapContext under Hadoop 1 and MapContextImpl under Hadoop 2.
 */
public static MapContext newMapContext(Configuration conf,
    TaskAttemptID taskAttemptID, RecordReader recordReader,
    RecordWriter recordWriter, OutputCommitter outputCommitter,
    StatusReporter statusReporter, InputSplit inputSplit) {
  return (MapContext) newInstance(MAP_CONTEXT_CONSTRUCTOR,
      conf, taskAttemptID, recordReader, recordWriter, outputCommitter,
      statusReporter, inputSplit);
}
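/*
 * A minimal usage sketch, not taken from the original sources: it assumes the
 * newMapContext() factory above is hosted by a compatibility class (called
 * ContextFactory here purely for illustration) and that the usual Hadoop
 * types (org.apache.hadoop.mapreduce.*, org.apache.hadoop.mapreduce.lib.map.WrappedMapper)
 * plus the reader/writer/committer/reporter/split objects are available, as in
 * the test methods above. WrappedMapper adapts the version-neutral MapContext
 * into the Mapper.Context expected by Mapper#run().
 */
@SuppressWarnings({"rawtypes", "unchecked"})
public static Mapper.Context wrapMapContext(Configuration conf, TaskAttemptID taskId,
    RecordReader reader, RecordWriter writer, OutputCommitter committer,
    StatusReporter reporter, InputSplit split) {
  // ContextFactory (hypothetical host class) picks the right constructor for Hadoop 1 or 2.
  MapContext mapContext = ContextFactory.newMapContext(
      conf, taskId, reader, writer, committer, reporter, split);
  return new WrappedMapper().getMapContext(mapContext);
}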
@SuppressWarnings({"unchecked", "rawtypes"}) @Test (timeout=10000) public void testSleepMapper() throws Exception { SleepJob.SleepMapper test = new SleepJob.SleepMapper(); Configuration conf = new Configuration(); conf.setInt(JobContext.NUM_REDUCES, 2); CompressionEmulationUtil.setCompressionEmulationEnabled(conf, true); conf.setBoolean(MRJobConfig.MAP_OUTPUT_COMPRESS, true); TaskAttemptID taskId = new TaskAttemptID(); FakeRecordLLReader reader = new FakeRecordLLReader(); LoadRecordGkNullWriter writer = new LoadRecordGkNullWriter(); OutputCommitter committer = new CustomOutputCommitter(); StatusReporter reporter = new TaskAttemptContextImpl.DummyReporter(); SleepSplit split = getSleepSplit(); MapContext<LongWritable, LongWritable, GridmixKey, NullWritable> mapcontext = new MapContextImpl<LongWritable, LongWritable, GridmixKey, NullWritable>( conf, taskId, reader, writer, committer, reporter, split); Context context = new WrappedMapper<LongWritable, LongWritable, GridmixKey, NullWritable>() .getMapContext(mapcontext); long start = System.currentTimeMillis(); LOG.info("start:" + start); LongWritable key = new LongWritable(start + 2000); LongWritable value = new LongWritable(start + 2000); // should slip 2 sec test.map(key, value, context); LOG.info("finish:" + System.currentTimeMillis()); assertTrue(System.currentTimeMillis() >= (start + 2000)); test.cleanup(context); assertEquals(1, writer.getData().size()); }
public MultiMapContextImpl(Configuration conf, TaskAttemptID taskid,
                           RecordReader<KEYIN,VALUEIN> reader,
                           RecordWriter<KEYOUT,VALUEOUT> writer,
                           OutputCommitter committer,
                           StatusReporter reporter,
                           InputSplit[] split) {
  super(conf, taskid, writer, committer, reporter);
  this.reader = reader;
  this.split = split;
}
@SuppressWarnings({ "rawtypes", "unchecked" }) public Reducer.Context createReduceContext(Configuration conf, TaskAttemptID taskid, RawKeyValueIterator input, Counter inputKeyCounter, Counter inputValueCounter, RecordWriter output, OutputCommitter committer, StatusReporter reporter, RawComparator comparator, Class keyClass, Class valueClass) throws HyracksDataException { try { return new WrappedReducer().getReducerContext(new ReduceContextImpl(conf, taskid, input, inputKeyCounter, inputValueCounter, output, committer, reporter, comparator, keyClass, valueClass)); } catch (Exception e) { throw new HyracksDataException(e); } }