Java class org.apache.hadoop.mapreduce.lib.jobcontrol.JobControl: example source code
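
All of the snippets below construct a JobControl and feed it ControlledJob instances; what they leave implicit is the driver loop that actually executes the job graph. Below is a minimal sketch of that loop, assuming nothing beyond the public JobControl/ControlledJob API (the class and thread names are placeholders):

import java.util.List;
import org.apache.hadoop.mapreduce.lib.jobcontrol.ControlledJob;
import org.apache.hadoop.mapreduce.lib.jobcontrol.JobControl;

public class JobControlDriver {
  public static boolean runToCompletion(JobControl control) throws InterruptedException {
    // JobControl implements Runnable and its run() loops until stop() is
    // called, so it is conventionally driven from its own thread.
    Thread runner = new Thread(control, "JobControl-runner");
    runner.setDaemon(true);
    runner.start();

    while (!control.allFinished()) {
      Thread.sleep(1000); // poll until every job reaches a terminal state
    }
    control.stop();

    List<ControlledJob> failed = control.getFailedJobList();
    for (ControlledJob cjob : failed) {
      System.err.println("Failed: " + cjob.getJobName() + " - " + cjob.getMessage());
    }
    return failed.isEmpty();
  }
}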

Project: mapreduce-fork    File: GridMixRunner.java
public void addJob(int numReducers, boolean mapoutputCompressed,
    boolean outputCompressed, Size size, JobControl gridmix) {
  final String prop = String.format("combiner.%sJobs.inputFiles", size);
  final String indir = getInputDirsFor(prop, size.defaultPath(VARCOMPSEQ));
  final String outdir = addTSSuffix("perf-out/combiner-out-dir-" + size);

  StringBuffer sb = new StringBuffer();
  sb.append("-r ").append(numReducers).append(" ");
  sb.append("-indir ").append(indir).append(" ");
  sb.append("-outdir ").append(outdir);
  sb.append("-mapoutputCompressed ");
  sb.append(mapoutputCompressed).append(" ");
  sb.append("-outputCompressed ").append(outputCompressed);

  String[] args = sb.toString().split(" ");
  clearDir(outdir);
  try {
    Job job = CombinerJobCreator.createJob(args);
    job.setJobName("GridmixCombinerJob." + size);
    ControlledJob cjob = new ControlledJob(job, null);
    gridmix.addJob(cjob);
  } catch (Exception ex) {
    ex.printStackTrace();
  }
}
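
Building the argument vector by concatenating one string and splitting on spaces is fragile; the missing separator fixed above is one symptom, and any path containing a space would break it too. A sketch of a list-based alternative (a hypothetical helper, not part of GridMixRunner):

import java.util.ArrayList;
import java.util.List;

// Hypothetical helper: collect flag/value pairs, then convert to String[]
// with no manual separator bookkeeping.
static String[] combinerArgs(int numReducers, String indir, String outdir,
    boolean mapoutputCompressed, boolean outputCompressed) {
  List<String> args = new ArrayList<String>();
  args.add("-r");
  args.add(String.valueOf(numReducers));
  args.add("-indir");
  args.add(indir);
  args.add("-outdir");
  args.add(outdir);
  args.add("-mapoutputCompressed");
  args.add(String.valueOf(mapoutputCompressed));
  args.add("-outputCompressed");
  args.add(String.valueOf(outputCompressed));
  return args.toArray(new String[args.size()]);
}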
Project: hadoop    File: ValueAggregatorJob.java
public static JobControl createValueAggregatorJobs(String args[],
  Class<? extends ValueAggregatorDescriptor>[] descriptors) 
throws IOException {

  JobControl theControl = new JobControl("ValueAggregatorJobs");
  ArrayList<ControlledJob> dependingJobs = new ArrayList<ControlledJob>();
  Configuration conf = new Configuration();
  if (descriptors != null) {
    conf = setAggregatorDescriptors(descriptors);
  }
  Job job = createValueAggregatorJob(conf, args);
  ControlledJob cjob = new ControlledJob(job, dependingJobs);
  theControl.addJob(cjob);
  return theControl;
}
The identical two-argument createValueAggregatorJobs appears verbatim in the aliyun-oss-hadoop-fs, big-c, hadoop-2.6.0-cdh5.4.3, hadoop-plus, FlexMap, hops, hadoop-TCP, hardfs, hadoop-on-lustre2, and mapreduce-fork projects (File: ValueAggregatorJob.java in each).
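
The dependingJobs list handed to the ControlledJob constructor is the dependency mechanism: JobControl only marks a job ready once everything it depends on has succeeded. A minimal sketch of chaining two preconfigured jobs (jobA and jobB are placeholders):

import java.io.IOException;
import java.util.Arrays;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.jobcontrol.ControlledJob;
import org.apache.hadoop.mapreduce.lib.jobcontrol.JobControl;

static JobControl chain(Job jobA, Job jobB) throws IOException {
  ControlledJob first = new ControlledJob(jobA, null);
  // Passing the dependency list at construction...
  ControlledJob second = new ControlledJob(jobB, Arrays.asList(first));
  // ...is equivalent to calling second.addDependingJob(first) afterwards.

  JobControl control = new JobControl("chained");
  control.addJob(first);
  control.addJob(second); // becomes ready only after `first` succeeds
  return control;
}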
Project: mapreduce-fork    File: GridMixRunner.java
public void addJob(int numReducers, boolean mapoutputCompressed,
    boolean outputCompressed, Size size, JobControl gridmix) {
  final String prop = String.format("streamSort.%sJobs.inputFiles", size);
  final String indir = 
    getInputDirsFor(prop, size.defaultPath(VARINFLTEXT));
  final String outdir = addTSSuffix("perf-out/stream-out-dir-" + size);

  StringBuffer sb = new StringBuffer();
  sb.append("-input ").append(indir).append(" ");
  sb.append("-output ").append(outdir).append(" ");
  sb.append("-mapper cat ");
  sb.append("-reducer cat ");
  sb.append("-numReduceTasks ").append(numReducers);
  String[] args = sb.toString().split(" ");

  clearDir(outdir);
  try {
    Configuration conf = StreamJob.createJob(args);
    conf.setBoolean(FileOutputFormat.COMPRESS, outputCompressed);
    conf.setBoolean(MRJobConfig.MAP_OUTPUT_COMPRESS, mapoutputCompressed);
    Job job = new Job(conf, "GridmixStreamingSorter." + size);
    ControlledJob cjob = new ControlledJob(job, null);
    gridmix.addJob(cjob);
  } catch (Exception ex) {
    ex.printStackTrace();
  }
}
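
A small modernization note: the Job constructors used in these GridMix snippets are deprecated on Hadoop 2.x and later; the factory method is the drop-in replacement:

// Deprecated form, as used above:
//   Job job = new Job(conf, "GridmixStreamingSorter." + size);
// Current equivalent:
Job job = Job.getInstance(conf, "GridmixStreamingSorter." + size);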
Project: mapreduce-fork    File: GridMixRunner.java
public void addJob(int numReducers, boolean mapoutputCompressed,
    boolean outputCompressed, Size size, JobControl gridmix) {
  final String prop = String.format("javaSort.%sJobs.inputFiles", size);
  final String indir = getInputDirsFor(prop,
                         size.defaultPath(VARINFLTEXT));
  final String outdir = addTSSuffix("perf-out/sort-out-dir-" + size);

  clearDir(outdir);

  try {
    Configuration conf = new Configuration();
    conf.setBoolean(FileOutputFormat.COMPRESS, outputCompressed);
    conf.setBoolean(MRJobConfig.MAP_OUTPUT_COMPRESS, mapoutputCompressed);
    Job job = new Job(conf);
    job.setJarByClass(Sort.class);
    job.setJobName("GridmixJavaSorter." + size);
    job.setMapperClass(Mapper.class);
    job.setReducerClass(Reducer.class);

    job.setNumReduceTasks(numReducers);
    job.setInputFormatClass(KeyValueTextInputFormat.class);
    job.setOutputFormatClass(TextOutputFormat.class);

    job.setOutputKeyClass(org.apache.hadoop.io.Text.class);
    job.setOutputValueClass(org.apache.hadoop.io.Text.class);

    FileInputFormat.addInputPaths(job, indir);
    FileOutputFormat.setOutputPath(job, new Path(outdir));

    ControlledJob cjob = new ControlledJob(job, null);
    gridmix.addJob(cjob);
  } catch (Exception ex) {
    ex.printStackTrace();
  }
}
Project: mapreduce-fork    File: GridMixRunner.java
public void addJob(int numReducers, boolean mapoutputCompressed,
    boolean outputCompressed, Size size, JobControl gridmix) {
  final String prop = String.format("webdataScan.%sJobs.inputFiles", size);
  final String indir = getInputDirsFor(prop, size.defaultPath(VARCOMPSEQ));
  final String outdir = addTSSuffix("perf-out/webdata-scan-out-dir-"
                          + size);
  StringBuffer sb = new StringBuffer();
  sb.append("-keepmap 0.2 ");
  sb.append("-keepred 5 ");
  sb.append("-inFormat");
  sb.append(" org.apache.hadoop.mapreduce." +
    "lib.input.SequenceFileInputFormat ");
  sb.append("-outFormat");
  sb.append(" org.apache.hadoop.mapreduce." +
    "lib.output.SequenceFileOutputFormat ");
  sb.append("-outKey org.apache.hadoop.io.Text ");
  sb.append("-outValue org.apache.hadoop.io.Text ");
  sb.append("-indir ").append(indir).append(" ");
  sb.append("-outdir ").append(outdir).append(" ");
  sb.append("-r ").append(numReducers);

  String[] args = sb.toString().split(" ");
  clearDir(outdir);
  try {
    Job job = GenericMRLoadJobCreator.createJob(
        args, mapoutputCompressed, outputCompressed);
    job.setJobName("GridmixWebdatascan." + size);
    ControlledJob cjob = new ControlledJob(job, null);
    gridmix.addJob(cjob);
  } catch (Exception ex) {
    ex.printStackTrace(); // println(ex.getStackTrace()) would print only the array's identity hash
  }
}
Project: mapreduce-fork    File: GridMixRunner.java
public void addJob(int numReducers, boolean mapoutputCompressed,
    boolean outputCompressed, Size size, JobControl gridmix) {
  final String prop = String.format("webdataSort.%sJobs.inputFiles", size);
  final String indir = getInputDirsFor(prop, size.defaultPath(VARCOMPSEQ));
  final String outdir = 
    addTSSuffix("perf-out/webdata-sort-out-dir-" + size);

  StringBuffer sb = new StringBuffer();
  sb.append("-keepmap 100 ");
  sb.append("-keepred 100 ");
  sb.append("-inFormat org.apache.hadoop.mapreduce." +
    "lib.input.SequenceFileInputFormat ");
  sb.append("-outFormat org.apache.hadoop.mapreduce." +
    "lib.output.SequenceFileOutputFormat ");
  sb.append("-outKey org.apache.hadoop.io.Text ");
  sb.append("-outValue org.apache.hadoop.io.Text ");
  sb.append("-indir ").append(indir).append(" ");
  sb.append("-outdir ").append(outdir).append(" ");
  sb.append("-r ").append(numReducers);

  String[] args = sb.toString().split(" ");
  clearDir(outdir);
  try {
    Job job = GenericMRLoadJobCreator.createJob(
        args, mapoutputCompressed, outputCompressed);
    job.setJobName("GridmixWebdataSort." + size);
    ControlledJob cjob = new ControlledJob(job, null);
    gridmix.addJob(cjob);
  } catch (Exception ex) {
    ex.printStackTrace(); // println(ex.getStackTrace()) would print only the array's identity hash
  }
}
Project: hadoop    File: ValueAggregatorJob.java
public static JobControl createValueAggregatorJobs(String args[]) 
    throws IOException {
  return createValueAggregatorJobs(args, null);
}
The identical one-argument convenience overload appears verbatim in the aliyun-oss-hadoop-fs, big-c, hadoop-2.6.0-cdh5.4.3, hadoop-plus, FlexMap, hops, hadoop-TCP, hardfs, hadoop-on-lustre2, and mapreduce-fork projects (File: ValueAggregatorJob.java in each).
Project: guagua    File: GuaguaMapReduceClient.java
/**
 * Default constructor. Constructs a default JobControl instance.
 */
public GuaguaMapReduceClient() {
    this.jc = new JobControl(INIT_JOB_ID_PREFIX);
}
Project: mapreduce-fork    File: GridMixRunner.java
public abstract void addJob(int numReducers, boolean mapComp,
    boolean outComp, Size size, JobControl gridmix);
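
In GridMixRunner this abstract addJob is implemented per job type by enum constants; a stripped-down sketch of that pattern, with illustrative names rather than the original's:

import org.apache.hadoop.mapreduce.lib.jobcontrol.JobControl;

// Illustrative only: each constant supplies its own addJob, mirroring how
// GridMixRunner attaches one implementation per job kind.
enum DemoJobKind {
  SORT_DEMO {
    @Override
    public void addJob(JobControl gridmix) {
      // build a Job, wrap it in a ControlledJob, and call gridmix.addJob(...)
      // exactly as the concrete snippets above do
    }
  };

  public abstract void addJob(JobControl gridmix);
}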
Project: mapreduce-fork    File: GridMixRunner.java
public GridMixRunner() throws IOException {
  gridmix = new JobControl("GridMix");
  if (null == config || null == fs) {
    throw new IOException("Bad configuration. Cannot continue.");
  }
}
Project: guagua    File: GuaguaMapReduceClient.java
/**
 * Compute the progress of the jobs submitted through the JobControl object jc
 * to the JobClient jobClient.
 * 
 * @param successJobs
 *            Jobs already recorded as successful, so the reported count never
 *            regresses between polls
 * @param jc
 *            The JobControl object that has been submitted
 * @param jobClient
 *            The JobClient to which it has been submitted
 * @return The progress: one point per successful job plus the fractional
 *         progress of each running job (not a percentage)
 * @throws IOException
 *             If an IOException occurs while connecting to the JobTracker
 */
protected double calculateProgress(Set<String> successJobs, JobControl jc, JobClient jobClient) throws IOException {
    double prog = 0.0;
    prog += Math.max(jc.getSuccessfulJobList().size(), successJobs.size());

    List<ControlledJob> runnJobs = jc.getRunningJobList();
    for(ControlledJob cjob: runnJobs) {
        prog += progressOfRunningJob(cjob, jobClient);
    }
    return prog;
}
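
progressOfRunningJob is a helper defined elsewhere in GuaguaMapReduceClient; below is a plausible reconstruction, assuming the classic mapred JobClient API (the body is an assumption, not the project's actual code):

import java.io.IOException;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobID;
import org.apache.hadoop.mapred.RunningJob;
import org.apache.hadoop.mapreduce.lib.jobcontrol.ControlledJob;

protected double progressOfRunningJob(ControlledJob cjob, JobClient jobClient)
        throws IOException {
    // Look up the live job; JobID.downgrade() bridges the new-API and
    // old-API job ID types.
    RunningJob rj = jobClient.getJob(JobID.downgrade(cjob.getJob().getJobID()));
    if (rj == null) {
        return 0.0; // job not yet visible to the cluster
    }
    // Map and reduce progress are each reported in [0, 1]; average them.
    return (rj.mapProgress() + rj.reduceProgress()) / 2.0;
}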