/**
 * Configures and runs a single-reduce job over the fake input format,
 * then cleans up the output directory and returns the job counters.
 */
public static Counters runJob(JobConf conf) throws Exception {
  conf.setMapperClass(MapMB.class);
  conf.setReducerClass(IdentityReducer.class);
  conf.setOutputKeyClass(Text.class);
  conf.setOutputValueClass(Text.class);
  conf.setNumReduceTasks(1);
  conf.setInputFormat(FakeIF.class);
  FileInputFormat.setInputPaths(conf, new Path("/in"));
  final Path outp = new Path("/out");
  FileOutputFormat.setOutputPath(conf, outp);
  RunningJob job = null;
  try {
    job = JobClient.runJob(conf);
    assertTrue(job.isSuccessful());
  } finally {
    // Always remove the output directory so later runs start clean.
    FileSystem fs = dfsCluster.getFileSystem();
    if (fs.exists(outp)) {
      fs.delete(outp, true);
    }
  }
  return job.getCounters();
}
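// Illustrative sketch, not part of the original test: one way a caller
// could drive runJob above and read a counter off the result. It assumes
// this class sits in the org.apache.hadoop.mapred package (so Task.Counter
// is visible) and holds the mrCluster field used by runTest further below.
public void exampleReadCounters() throws Exception {
  JobConf conf = mrCluster.createJobConf();
  Counters counters = runJob(conf);
  // getCounter(Enum) is the old-API accessor for framework counters.
  long mapInputs = counters.getCounter(Task.Counter.MAP_INPUT_RECORDS);
  assertTrue("expected map input records to be counted", mapInputs >= 0);
}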
/**
 * Runs a three-map, single-reduce job with the supplied mapper, toggling
 * the no-fetch optimization for empty map outputs, and returns the task
 * completion events for callers to inspect.
 */
@SuppressWarnings({ "deprecation", "unchecked" })
public static TaskCompletionEvent[] runJob(JobConf conf, Class mapperClass,
    boolean enableNoFetchEmptyMapOutputs) throws Exception {
  conf.setMapperClass(mapperClass);
  conf.setReducerClass(IdentityReducer.class);
  conf.setOutputKeyClass(Text.class);
  conf.setOutputValueClass(Text.class);
  conf.setNumMapTasks(3);
  conf.setNumReduceTasks(1);
  conf.setInputFormat(FakeIF.class);
  conf.setBoolean("mapred.enable.no.fetch.map.outputs",
      enableNoFetchEmptyMapOutputs);
  FileInputFormat.setInputPaths(conf, new Path("/in"));
  final Path outp = new Path("/out");
  FileOutputFormat.setOutputPath(conf, outp);
  RunningJob job = JobClient.runJob(conf);
  assertTrue(job.isSuccessful());
  // Fetch completion events starting from event id 0.
  return job.getTaskCompletionEvents(0);
}
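// Illustrative sketch, not part of the original test: checking the
// completion events returned by runJob above. IdentityMapper stands in
// for any mapper visible to the test; with the no-fetch flag enabled,
// every map event should still report SUCCEEDED.
public void exampleCheckCompletionEvents() throws Exception {
  JobConf conf = mrCluster.createJobConf();
  TaskCompletionEvent[] events = runJob(conf, IdentityMapper.class, true);
  for (TaskCompletionEvent event : events) {
    if (event.isMapTask()) {
      assertEquals(TaskCompletionEvent.Status.SUCCEEDED,
          event.getTaskStatus());
    }
  }
}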
/**
 * Configures and runs a block-collector job: sizes the map-side sort
 * buffer, sets the synthetic record shapes consumed by FakeIF, and
 * registers the expected record counts with RecordNumStore before
 * submitting the job.
 */
private void runTest(String name, int keyLen, int valLen, int bigKeyLen,
    int bigValLen, int recordsNumPerMapper, int sortMb, float spillPer,
    int numMapperTasks, int numReducerTask, double[] reducerRecPercents,
    int[] numBigRecordsStart, int[] numBigRecordsMiddle,
    int[] numBigRecordsEnd) throws Exception {
  JobConf conf = mrCluster.createJobConf();
  // Size the in-memory sort buffer and its spill threshold.
  conf.setInt("io.sort.mb", sortMb);
  conf.set("io.sort.spill.percent", Float.toString(spillPer));
  // Lengths of the normal and oversized records to generate.
  conf.setInt("test.key.length", keyLen);
  conf.setInt("test.value.length", valLen);
  conf.setInt("test.bigkey.length", bigKeyLen);
  conf.setInt("test.bigvalue.length", bigValLen);
  conf.setNumMapTasks(numMapperTasks);
  conf.setNumReduceTasks(numReducerTask);
  conf.setInputFormat(FakeIF.class);
  conf.setOutputFormat(NullOutputFormat.class);
  conf.setMapperClass(TestNewCollectorMapper.class);
  conf.setReducerClass(TestNewCollectorReducer.class);
  conf.setMapOutputKeyClass(TestNewCollectorKey.class);
  conf.setMapOutputValueClass(BytesWritable.class);
  // Exercise the block collector on the map side.
  conf.setBoolean("mapred.map.output.blockcollector", true);
  RecordNumStore.setJobConf(numReducerTask, numMapperTasks,
      recordsNumPerMapper, reducerRecPercents, numBigRecordsStart,
      numBigRecordsMiddle, numBigRecordsEnd, conf);
  RecordNumStore.getInst(conf);
  LOG.info("Running " + name);
  JobClient.runJob(conf);
}
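// Illustrative sketch, not part of the original test: a plausible call to
// runTest above. Every number is a made-up assumption chosen only to show
// the parameter order: 10-byte keys and values, 64KB big records, 1000
// records per mapper, a 1MB sort buffer spilling at 50%, two mappers, and
// one reducer expected to receive 100% of the records. The big-record
// arrays are assumed to be indexed per mapper.
public void exampleRunCollectorTest() throws Exception {
  runTest("exampleBigRecords", 10, 10, 64 * 1024, 64 * 1024, 1000,
      1, 0.5f, 2, 1, new double[] { 1.0 },
      new int[] { 1, 1 }, new int[] { 1, 1 }, new int[] { 1, 1 });
}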