Java class org.apache.hadoop.mapreduce.v2.app.speculate.TaskRuntimeEstimator: example usages (source code)

Project: hadoop    File: TestSpeculativeExecution.java
private Job runSpecTest(boolean mapspec, boolean redspec)
    throws IOException, ClassNotFoundException, InterruptedException {

  Path first = createTempFile("specexec_map_input1", "a\nz");
  Path secnd = createTempFile("specexec_map_input2", "a\nz");

  Configuration conf = mrCluster.getConfig();
  conf.setBoolean(MRJobConfig.MAP_SPECULATIVE, mapspec);
  conf.setBoolean(MRJobConfig.REDUCE_SPECULATIVE, redspec);
  conf.setClass(MRJobConfig.MR_AM_TASK_ESTIMATOR,
          TestSpecEstimator.class,
          TaskRuntimeEstimator.class);

  Job job = Job.getInstance(conf);
  job.setJarByClass(TestSpeculativeExecution.class);
  job.setMapperClass(SpeculativeMapper.class);
  job.setReducerClass(SpeculativeReducer.class);
  job.setOutputKeyClass(Text.class);
  job.setOutputValueClass(IntWritable.class);
  job.setNumReduceTasks(2);
  FileInputFormat.setInputPaths(job, first);
  FileInputFormat.addInputPath(job, secnd);
  FileOutputFormat.setOutputPath(job, TEST_OUT_DIR);

  // Delete output directory if it exists.
  try {
    localFs.delete(TEST_OUT_DIR, true);
  } catch (IOException e) {
    // ignore
  }

  // Add the application-master jar to the job classpath.
  job.addFileToClassPath(APP_JAR); // The AppMaster jar itself.
  job.setMaxMapAttempts(2);

  job.submit();

  return job;
}
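
The TestSpecEstimator class wired in above via MRJobConfig.MR_AM_TASK_ESTIMATOR is defined elsewhere in the same test file and is not shown on this page. As a rough sketch of the idea only (not the actual Hadoop implementation; the class body, constant, and attempt-selection logic below are assumptions), such a test estimator can extend LegacyTaskRuntimeEstimator and override estimatedRuntime so that one attempt looks pathologically slow, which pushes the AM into launching a speculative attempt:

// Hypothetical sketch; the real TestSpecEstimator in TestSpeculativeExecution.java may differ.
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
import org.apache.hadoop.mapreduce.v2.app.speculate.LegacyTaskRuntimeEstimator;

public class TestSpecEstimator extends LegacyTaskRuntimeEstimator {
  // Assumed marker value: large enough that the speculator treats the attempt as a straggler.
  private static final long HUGE_ESTIMATE_MS = 500L * 1000L;

  // Report an absurdly long estimate for the first attempt of the first task so the
  // AM decides a second, speculative attempt is worthwhile; defer to the legacy
  // estimator for every other attempt.
  @Override
  public long estimatedRuntime(TaskAttemptId id) {
    if (id.getTaskId().getId() == 0 && id.getId() == 0) {
      return HUGE_ESTIMATE_MS;
    }
    return super.estimatedRuntime(id);
  }
}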
Projects: aliyun-oss-hadoop-fs, big-c, hadoop-2.6.0-cdh5.4.3, hadoop-plus, FlexMap, hops, hadoop-TCP, hardfs, hadoop-on-lustre2    File: TestSpeculativeExecution.java (each contains the same runSpecTest method shown above)
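
In every project listed above, runSpecTest only submits the job and returns; the test method that calls it is expected to wait for completion and inspect the job counters. A hedged sketch of such a caller inside the same test class follows (the method name and the specific counter assertions are assumptions, not the original test's exact checks; it additionally needs org.apache.hadoop.mapreduce.Counters, org.apache.hadoop.mapreduce.JobCounter and the JUnit imports):

// Hypothetical test method that would live in the same TestSpeculativeExecution class.
@Test
public void testSpeculativeMaps() throws Exception {
  Job job = runSpecTest(true, false);   // speculate maps only
  // waitForCompletion() is safe after submit(): the job is already running, so it just monitors.
  Assert.assertTrue("job did not succeed", job.waitForCompletion(true));

  Counters counters = job.getCounters();
  long launchedMaps = counters.findCounter(JobCounter.TOTAL_LAUNCHED_MAPS).getValue();
  long killedMaps = counters.findCounter(JobCounter.NUM_KILLED_MAPS).getValue();

  // With speculation enabled the AM may launch more map attempts than input splits;
  // the losing speculative attempts are accounted for as killed.
  Assert.assertTrue(launchedMaps >= 2);
  Assert.assertTrue(killedMaps >= 0);
}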
Project: hadoop    File: TestRuntimeEstimators.java
@Test
public void testLegacyEstimator() throws Exception {
  TaskRuntimeEstimator specificEstimator = new LegacyTaskRuntimeEstimator();
  coreTestEstimator(specificEstimator, 3);
}
Project: hadoop    File: TestRuntimeEstimators.java
@Test
public void testExponentialEstimator() throws Exception {
  TaskRuntimeEstimator specificEstimator
      = new ExponentiallySmoothedTaskRuntimeEstimator();
  coreTestEstimator(specificEstimator, 3);
}
Projects: aliyun-oss-hadoop-fs, big-c, hadoop-2.6.0-cdh5.4.3, hadoop-plus, FlexMap, hops, hadoop-TCP, hardfs, hadoop-on-lustre2    File: TestRuntimeEstimators.java (each contains the same testLegacyEstimator and testExponentialEstimator methods shown above)
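
The unit tests above construct LegacyTaskRuntimeEstimator and ExponentiallySmoothedTaskRuntimeEstimator directly and drive them through coreTestEstimator. To select one of these estimators for a real job, the same MRJobConfig.MR_AM_TASK_ESTIMATOR key used in runSpecTest applies. Below is a minimal driver-side sketch, assuming the estimator classes (which live in the MR app-master module) are available on the job's classpath; the driver class name is hypothetical and only configuration keys already shown in the listings above are used:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.v2.app.speculate.ExponentiallySmoothedTaskRuntimeEstimator;
import org.apache.hadoop.mapreduce.v2.app.speculate.TaskRuntimeEstimator;

public class SpeculationEstimatorDemo {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    conf.setBoolean(MRJobConfig.MAP_SPECULATIVE, true);
    conf.setBoolean(MRJobConfig.REDUCE_SPECULATIVE, true);
    // Replace the AM's default runtime estimator with the exponentially smoothed one.
    conf.setClass(MRJobConfig.MR_AM_TASK_ESTIMATOR,
        ExponentiallySmoothedTaskRuntimeEstimator.class,
        TaskRuntimeEstimator.class);

    Job job = Job.getInstance(conf, "speculation-estimator-demo");
    // ... set jar, mapper, reducer, and input/output paths as in any MapReduce driver ...
    System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
}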