Example source code for the Java class org.apache.hadoop.mapreduce.lib.jobcontrol.ControlledJob

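The snippets below are collected from open-source projects and show the recurring ControlledJob patterns: wrapping a Job together with the jobs it depends on, registering the result with a JobControl, and driving the control thread to completion.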
Project: Deep_learning_using_Java    File: Recommendation_program.java
public int run(String[] args) throws Exception {
    System.out.println(Arrays.toString(args));
   /* getting the chunk of data and converting it to the corresponding key/value pairs */
    @SuppressWarnings("deprecation")
    String intermediateFileDir = "tmp";
    String intermediateFileDirFile = intermediateFileDir + "/part-r-00000";
    JobControl control = new JobControl("ChainMapReduce");
    ControlledJob step1 = new ControlledJob(jobListFriends(args[0], intermediateFileDir), null);
    ControlledJob step2 = new ControlledJob(jobRecommendFriends(intermediateFileDirFile, args[1]), Arrays.asList(step1));
    control.addJob(step1);
    control.addJob(step2);
    Thread workFlowThread = new Thread(control, "workflowthread");
    workFlowThread.setDaemon(true);
    workFlowThread.start();  
    return 0;
}
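Note that run() starts the JobControl on a daemon thread and then returns immediately, so the JVM may exit before either step has executed. The usual pattern is to poll JobControl.allFinished() before returning; a minimal sketch (the poll interval and the exit-code convention are illustrative assumptions, not part of the original):

    // Sketch: block until both chained jobs have finished.
    workFlowThread.start();
    while (!control.allFinished()) {
        Thread.sleep(500); // assumed poll interval
    }
    control.stop(); // lets the control thread exit
    return control.getFailedJobList().isEmpty() ? 0 : 1;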
Project: mapreduce-fork    File: GridMixRunner.java
public void addJob(int numReducers, boolean mapoutputCompressed,
    boolean outputCompressed, Size size, JobControl gridmix) {
  final String prop = String.format("combiner.%sJobs.inputFiles", size);
  final String indir = getInputDirsFor(prop, size.defaultPath(VARCOMPSEQ));
  final String outdir = addTSSuffix("perf-out/combiner-out-dir-" + size);

  StringBuffer sb = new StringBuffer();
  sb.append("-r ").append(numReducers).append(" ");
  sb.append("-indir ").append(indir).append(" ");
  sb.append("-outdir ").append(outdir);
  sb.append("-mapoutputCompressed ");
  sb.append(mapoutputCompressed).append(" ");
  sb.append("-outputCompressed ").append(outputCompressed);

  String[] args = sb.toString().split(" ");
  clearDir(outdir);
  try {
    Job job = CombinerJobCreator.createJob(args);
    job.setJobName("GridmixCombinerJob." + size);
    ControlledJob cjob = new ControlledJob(job, null);
    gridmix.addJob(cjob);
  } catch (Exception ex) {
    ex.printStackTrace();
  }
}
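Joining the flags into a single string and re-splitting on spaces is fragile: any value containing a space, or a missing separator like the one fixed above, silently corrupts the argument vector. Collecting the arguments in a list avoids the round-trip; a sketch, not the original GridMix code:

    // Sketch: build the argument vector directly instead of split(" ").
    List<String> argList = new ArrayList<>(Arrays.asList(
        "-r", String.valueOf(numReducers),
        "-indir", indir,
        "-outdir", outdir,
        "-mapoutputCompressed", String.valueOf(mapoutputCompressed),
        "-outputCompressed", String.valueOf(outputCompressed)));
    String[] args = argList.toArray(new String[0]);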
Project: hadoop    File: JobControl.java
static ArrayList<Job> castToJobList(List<ControlledJob> cjobs) {
  ArrayList<Job> ret = new ArrayList<Job>();
  for (ControlledJob job : cjobs) {
    ret.add((Job)job);
  }
  return ret;
}
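castToJobList lives in the old-API wrapper org.apache.hadoop.mapred.jobcontrol.JobControl. The downcast is safe there because org.apache.hadoop.mapred.jobcontrol.Job extends ControlledJob (see the Job constructor at the end of this page), so a list built by the wrapper contains only Job instances.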
Project: hadoop    File: ValueAggregatorJob.java
public static JobControl createValueAggregatorJobs(String args[],
  Class<? extends ValueAggregatorDescriptor>[] descriptors) 
throws IOException {

  JobControl theControl = new JobControl("ValueAggregatorJobs");
  ArrayList<ControlledJob> dependingJobs = new ArrayList<ControlledJob>();
  Configuration conf = new Configuration();
  if (descriptors != null) {
    conf = setAggregatorDescriptors(descriptors);
  }
  Job job = createValueAggregatorJob(conf, args);
  ControlledJob cjob = new ControlledJob(job, dependingJobs);
  theControl.addJob(cjob);
  return theControl;
}
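In each copy of createValueAggregatorJobs on this page, dependingJobs is created empty and never populated, so the aggregator job starts with no prerequisites. A follow-up stage would be chained through ControlledJob's dependency API; a minimal sketch, where secondStage is a hypothetical Job that is not part of the original:

    // Sketch: chain a hypothetical second stage behind the aggregator.
    ControlledJob first = new ControlledJob(job, null);
    ControlledJob second = new ControlledJob(secondStage, null);
    second.addDependingJob(first); // "second" runs only after "first" succeeds
    theControl.addJob(first);
    theControl.addJob(second);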
Project: aliyun-oss-hadoop-fs    File: JobControl.java
static ArrayList<Job> castToJobList(List<ControlledJob> cjobs) {
  ArrayList<Job> ret = new ArrayList<Job>();
  for (ControlledJob job : cjobs) {
    ret.add((Job)job);
  }
  return ret;
}
Project: aliyun-oss-hadoop-fs    File: ValueAggregatorJob.java
public static JobControl createValueAggregatorJobs(String args[],
  Class<? extends ValueAggregatorDescriptor>[] descriptors) 
throws IOException {

  JobControl theControl = new JobControl("ValueAggregatorJobs");
  ArrayList<ControlledJob> dependingJobs = new ArrayList<ControlledJob>();
  Configuration conf = new Configuration();
  if (descriptors != null) {
    conf = setAggregatorDescriptors(descriptors);
  }
  Job job = createValueAggregatorJob(conf, args);
  ControlledJob cjob = new ControlledJob(job, dependingJobs);
  theControl.addJob(cjob);
  return theControl;
}
Project: big-c    File: JobControl.java
static ArrayList<Job> castToJobList(List<ControlledJob> cjobs) {
  ArrayList<Job> ret = new ArrayList<Job>();
  for (ControlledJob job : cjobs) {
    ret.add((Job)job);
  }
  return ret;
}
Project: big-c    File: ValueAggregatorJob.java
public static JobControl createValueAggregatorJobs(String args[],
  Class<? extends ValueAggregatorDescriptor>[] descriptors) 
throws IOException {

  JobControl theControl = new JobControl("ValueAggregatorJobs");
  ArrayList<ControlledJob> dependingJobs = new ArrayList<ControlledJob>();
  Configuration conf = new Configuration();
  if (descriptors != null) {
    conf = setAggregatorDescriptors(descriptors);
  }
  Job job = createValueAggregatorJob(conf, args);
  ControlledJob cjob = new ControlledJob(job, dependingJobs);
  theControl.addJob(cjob);
  return theControl;
}
Project: hadoop-2.6.0-cdh5.4.3    File: JobControl.java
static ArrayList<Job> castToJobList(List<ControlledJob> cjobs) {
  ArrayList<Job> ret = new ArrayList<Job>();
  for (ControlledJob job : cjobs) {
    ret.add((Job)job);
  }
  return ret;
}
Project: hadoop-2.6.0-cdh5.4.3    File: ValueAggregatorJob.java
public static JobControl createValueAggregatorJobs(String args[],
  Class<? extends ValueAggregatorDescriptor>[] descriptors) 
throws IOException {

  JobControl theControl = new JobControl("ValueAggregatorJobs");
  ArrayList<ControlledJob> dependingJobs = new ArrayList<ControlledJob>();
  Configuration conf = new Configuration();
  if (descriptors != null) {
    conf = setAggregatorDescriptors(descriptors);
  }
  Job job = createValueAggregatorJob(conf, args);
  ControlledJob cjob = new ControlledJob(job, dependingJobs);
  theControl.addJob(cjob);
  return theControl;
}
Project: hadoop-plus    File: JobControl.java
static ArrayList<Job> castToJobList(List<ControlledJob> cjobs) {
  ArrayList<Job> ret = new ArrayList<Job>();
  for (ControlledJob job : cjobs) {
    ret.add((Job)job);
  }
  return ret;
}
Project: hadoop-plus    File: ValueAggregatorJob.java
public static JobControl createValueAggregatorJobs(String args[],
  Class<? extends ValueAggregatorDescriptor>[] descriptors) 
throws IOException {

  JobControl theControl = new JobControl("ValueAggregatorJobs");
  ArrayList<ControlledJob> dependingJobs = new ArrayList<ControlledJob>();
  Configuration conf = new Configuration();
  if (descriptors != null) {
    conf = setAggregatorDescriptors(descriptors);
  }
  Job job = createValueAggregatorJob(conf, args);
  ControlledJob cjob = new ControlledJob(job, dependingJobs);
  theControl.addJob(cjob);
  return theControl;
}
Project: FlexMap    File: JobControl.java
static ArrayList<Job> castToJobList(List<ControlledJob> cjobs) {
  ArrayList<Job> ret = new ArrayList<Job>();
  for (ControlledJob job : cjobs) {
    ret.add((Job)job);
  }
  return ret;
}
Project: FlexMap    File: ValueAggregatorJob.java
public static JobControl createValueAggregatorJobs(String args[],
  Class<? extends ValueAggregatorDescriptor>[] descriptors) 
throws IOException {

  JobControl theControl = new JobControl("ValueAggregatorJobs");
  ArrayList<ControlledJob> dependingJobs = new ArrayList<ControlledJob>();
  Configuration conf = new Configuration();
  if (descriptors != null) {
    conf = setAggregatorDescriptors(descriptors);
  }
  Job job = createValueAggregatorJob(conf, args);
  ControlledJob cjob = new ControlledJob(job, dependingJobs);
  theControl.addJob(cjob);
  return theControl;
}
Project: hops    File: JobControl.java
static ArrayList<Job> castToJobList(List<ControlledJob> cjobs) {
  ArrayList<Job> ret = new ArrayList<Job>();
  for (ControlledJob job : cjobs) {
    ret.add((Job)job);
  }
  return ret;
}
Project: hops    File: ValueAggregatorJob.java
public static JobControl createValueAggregatorJobs(String args[],
  Class<? extends ValueAggregatorDescriptor>[] descriptors) 
throws IOException {

  JobControl theControl = new JobControl("ValueAggregatorJobs");
  ArrayList<ControlledJob> dependingJobs = new ArrayList<ControlledJob>();
  Configuration conf = new Configuration();
  if (descriptors != null) {
    conf = setAggregatorDescriptors(descriptors);
  }
  Job job = createValueAggregatorJob(conf, args);
  ControlledJob cjob = new ControlledJob(job, dependingJobs);
  theControl.addJob(cjob);
  return theControl;
}
Project: guagua    File: GuaguaMapReduceClient.java
/**
 * Add a new job to the JobControl instance.
 */
public synchronized void addJob(String[] args) throws IOException {
    Job job = createJob(args);
    if(this.jobIndexMap.containsKey(job.getJobName())) {
        throw new IllegalStateException("Job name should be unique. Please check name with: " + job.getJobName());
    }
    this.jc.addJob(new ControlledJob(job, null));
    this.jobIndexMap.put(job.getJobName(), this.jobIndex);
    this.jobIndexParams.put(this.jobIndex, args);
    this.jobRunningTimes.put(this.jobIndex, 1);
    this.jobIndex += 1;
}
Project: guagua    File: GuaguaMapReduceClient.java
public String toFakedStateString(ControlledJob controlledJob) {
    StringBuffer sb = new StringBuffer();
    sb.append("job name:\t").append(controlledJob.getJob().getJobName()).append("\n");
    sb.append("job id:\t").append(controlledJob.getJobID()).append("\n");
    sb.append("job state:\t").append("SUCCESS").append("\n");
    sb.append("job mapred id:\t").append(controlledJob.getJob().getJobID()).append("\n");
    sb.append("job message:\t").append(" successful job").append("\n");
    sb.append("job has no depending job:\t").append("\n");
    return sb.toString();
}
Project: hadoop-TCP    File: JobControl.java
static ArrayList<Job> castToJobList(List<ControlledJob> cjobs) {
  ArrayList<Job> ret = new ArrayList<Job>();
  for (ControlledJob job : cjobs) {
    ret.add((Job)job);
  }
  return ret;
}
Project: hadoop-TCP    File: ValueAggregatorJob.java
public static JobControl createValueAggregatorJobs(String args[],
  Class<? extends ValueAggregatorDescriptor>[] descriptors) 
throws IOException {

  JobControl theControl = new JobControl("ValueAggregatorJobs");
  ArrayList<ControlledJob> dependingJobs = new ArrayList<ControlledJob>();
  Configuration conf = new Configuration();
  if (descriptors != null) {
    conf = setAggregatorDescriptors(descriptors);
  }
  Job job = createValueAggregatorJob(conf, args);
  ControlledJob cjob = new ControlledJob(job, dependingJobs);
  theControl.addJob(cjob);
  return theControl;
}
Project: spork-streaming    File: PigJobControl.java
private State checkState(ControlledJob j) {
  try {
    return (State)checkState.invoke(j);
  } catch (Exception e) {
    throw new RuntimeException(e);
  }
}
Project: spork-streaming    File: PigJobControl.java
private State submit(ControlledJob j) {
  try {
    return (State)submit.invoke(j);
  } catch (Exception e) {
    throw new RuntimeException(e);
  }
}
Project: spork-streaming    File: PigJobControl.java
@SuppressWarnings("unchecked")
private LinkedList<ControlledJob> getJobs(Field field) {
  try {
    return (LinkedList<ControlledJob>)field.get(this);
  } catch (Exception e) {
    throw new RuntimeException(e);
  }
}
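The three helpers above call members of ControlledJob and JobControl that are not public, which is why they go through reflection. The Method and Field handles are resolved once up front, roughly as follows; a sketch under the assumption that the member names match the Hadoop version Pig is built against (the field name jobsInProgress in particular is an assumption):

    import java.lang.reflect.Field;
    import java.lang.reflect.Method;

    // Sketch: resolve the reflective handles used by the helpers above.
    // Member names are assumptions tied to a specific Hadoop version.
    Method checkState = ControlledJob.class.getDeclaredMethod("checkState");
    checkState.setAccessible(true);
    Method submit = ControlledJob.class.getDeclaredMethod("submit");
    submit.setAccessible(true);
    Field jobsInProgress = JobControl.class.getDeclaredField("jobsInProgress");
    jobsInProgress.setAccessible(true);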
Project: spork    File: PigJobControl.java
private State checkState(ControlledJob j) {
  try {
    return (State)checkState.invoke(j);
  } catch (Exception e) {
    throw new RuntimeException(e);
  }
}
Project: spork    File: PigJobControl.java
private State submit(ControlledJob j) {
  try {
    return (State)submit.invoke(j);
  } catch (Exception e) {
    throw new RuntimeException(e);
  }
}
Project: spork    File: PigJobControl.java
@SuppressWarnings("unchecked")
private LinkedList<ControlledJob> getJobs(Field field) {
  try {
    return (LinkedList<ControlledJob>)field.get(this);
  } catch (Exception e) {
    throw new RuntimeException(e);
  }
}
Project: hardfs    File: JobControl.java
static ArrayList<Job> castToJobList(List<ControlledJob> cjobs) {
  ArrayList<Job> ret = new ArrayList<Job>();
  for (ControlledJob job : cjobs) {
    ret.add((Job)job);
  }
  return ret;
}
Project: hardfs    File: ValueAggregatorJob.java
public static JobControl createValueAggregatorJobs(String args[],
  Class<? extends ValueAggregatorDescriptor>[] descriptors) 
throws IOException {

  JobControl theControl = new JobControl("ValueAggregatorJobs");
  ArrayList<ControlledJob> dependingJobs = new ArrayList<ControlledJob>();
  Configuration conf = new Configuration();
  if (descriptors != null) {
    conf = setAggregatorDescriptors(descriptors);
  }
  Job job = createValueAggregatorJob(conf, args);
  ControlledJob cjob = new ControlledJob(job, dependingJobs);
  theControl.addJob(cjob);
  return theControl;
}
Project: hadoop-on-lustre2    File: JobControl.java
static ArrayList<Job> castToJobList(List<ControlledJob> cjobs) {
  ArrayList<Job> ret = new ArrayList<Job>();
  for (ControlledJob job : cjobs) {
    ret.add((Job)job);
  }
  return ret;
}
Project: hadoop-on-lustre2    File: ValueAggregatorJob.java
public static JobControl createValueAggregatorJobs(String args[],
  Class<? extends ValueAggregatorDescriptor>[] descriptors) 
throws IOException {

  JobControl theControl = new JobControl("ValueAggregatorJobs");
  ArrayList<ControlledJob> dependingJobs = new ArrayList<ControlledJob>();
  Configuration conf = new Configuration();
  if (descriptors != null) {
    conf = setAggregatorDescriptors(descriptors);
  }
  Job job = createValueAggregatorJob(conf, args);
  ControlledJob cjob = new ControlledJob(job, dependingJobs);
  theControl.addJob(cjob);
  return theControl;
}
Project: hanoi-hadoop-2.0.0-cdh    File: JobControl.java
static ArrayList<Job> castToJobList(List<ControlledJob> cjobs) {
  ArrayList<Job> ret = new ArrayList<Job>();
  for (ControlledJob job : cjobs) {
    ret.add((Job)job);
  }
  return ret;
}
Project: mapreduce-fork    File: JobControl.java
static ArrayList<Job> castToJobList(List<ControlledJob> cjobs) {
  ArrayList<Job> ret = new ArrayList<Job>();
  for (ControlledJob job : cjobs) {
    ret.add((Job)job);
  }
  return ret;
}
Project: mapreduce-fork    File: ValueAggregatorJob.java
public static JobControl createValueAggregatorJobs(String args[],
  Class<? extends ValueAggregatorDescriptor>[] descriptors) 
throws IOException {

  JobControl theControl = new JobControl("ValueAggregatorJobs");
  ArrayList<ControlledJob> dependingJobs = new ArrayList<ControlledJob>();
  Configuration conf = new Configuration();
  if (descriptors != null) {
    conf = setAggregatorDescriptors(descriptors);
  }
  Job job = createValueAggregatorJob(conf, args);
  ControlledJob cjob = new ControlledJob(job, dependingJobs);
  theControl.addJob(cjob);
  return theControl;
}
Project: mapreduce-fork    File: GridMixRunner.java
public void addJob(int numReducers, boolean mapoutputCompressed,
    boolean outputCompressed, Size size, JobControl gridmix) {
  final String prop = String.format("streamSort.%sJobs.inputFiles", size);
  final String indir = 
    getInputDirsFor(prop, size.defaultPath(VARINFLTEXT));
  final String outdir = addTSSuffix("perf-out/stream-out-dir-" + size);

  StringBuffer sb = new StringBuffer();
  sb.append("-input ").append(indir).append(" ");
  sb.append("-output ").append(outdir).append(" ");
  sb.append("-mapper cat ");
  sb.append("-reducer cat ");
  sb.append("-numReduceTasks ").append(numReducers);
  String[] args = sb.toString().split(" ");

  clearDir(outdir);
  try {
    Configuration conf = StreamJob.createJob(args);
    conf.setBoolean(FileOutputFormat.COMPRESS, outputCompressed);
    conf.setBoolean(MRJobConfig.MAP_OUTPUT_COMPRESS, mapoutputCompressed);
    Job job = new Job(conf, "GridmixStreamingSorter." + size);
    ControlledJob cjob = new ControlledJob(job, null);
    gridmix.addJob(cjob);
  } catch (Exception ex) {
    ex.printStackTrace();
  }
}
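With cat as both mapper and reducer, this streaming job is an identity pass: the benchmark measures only the framework's shuffle and sort plus the streaming overhead.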
Project: mapreduce-fork    File: GridMixRunner.java
public void addJob(int numReducers, boolean mapoutputCompressed,
    boolean outputCompressed, Size size, JobControl gridmix) {
  final String prop = String.format("javaSort.%sJobs.inputFiles", size);
  final String indir = getInputDirsFor(prop,
                         size.defaultPath(VARINFLTEXT));
  final String outdir = addTSSuffix("perf-out/sort-out-dir-" + size);

  clearDir(outdir);

  try {
    Configuration conf = new Configuration();
    conf.setBoolean(FileOutputFormat.COMPRESS, outputCompressed);
    conf.setBoolean(MRJobConfig.MAP_OUTPUT_COMPRESS, mapoutputCompressed);
    Job job = new Job(conf);
    job.setJarByClass(Sort.class);
    job.setJobName("GridmixJavaSorter." + size);
    job.setMapperClass(Mapper.class);
    job.setReducerClass(Reducer.class);

    job.setNumReduceTasks(numReducers);
    job.setInputFormatClass(KeyValueTextInputFormat.class);
    job.setOutputFormatClass(TextOutputFormat.class);

    job.setOutputKeyClass(org.apache.hadoop.io.Text.class);
    job.setOutputValueClass(org.apache.hadoop.io.Text.class);

    FileInputFormat.addInputPaths(job, indir);
    FileOutputFormat.setOutputPath(job, new Path(outdir));

    ControlledJob cjob = new ControlledJob(job, null);
    gridmix.addJob(cjob);
  } catch (Exception ex) {
    ex.printStackTrace();
  }
}
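Mapper.class and Reducer.class are the framework's identity implementations, so this job is a pure sort benchmark: records pass through unchanged and the shuffle/sort machinery does all the work.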
Project: mapreduce-fork    File: GridMixRunner.java
public void addJob(int numReducers, boolean mapoutputCompressed,
    boolean outputCompressed, Size size, JobControl gridmix) {
  final String prop = String.format("webdataScan.%sJobs.inputFiles", size);
  final String indir = getInputDirsFor(prop, size.defaultPath(VARCOMPSEQ));
  final String outdir = addTSSuffix("perf-out/webdata-scan-out-dir-"
                          + size);
  StringBuffer sb = new StringBuffer();
  sb.append("-keepmap 0.2 ");
  sb.append("-keepred 5 ");
  sb.append("-inFormat");
  sb.append(" org.apache.hadoop.mapreduce." +
    "lib.input.SequenceFileInputFormat ");
  sb.append("-outFormat");
  sb.append(" org.apache.hadoop.mapreduce." +
    "lib.output.SequenceFileOutputFormat ");
  sb.append("-outKey org.apache.hadoop.io.Text ");
  sb.append("-outValue org.apache.hadoop.io.Text ");
  sb.append("-indir ").append(indir).append(" ");
  sb.append("-outdir ").append(outdir).append(" ");
  sb.append("-r ").append(numReducers);

  String[] args = sb.toString().split(" ");
  clearDir(outdir);
  try {
    Job job = GenericMRLoadJobCreator.createJob(
        args, mapoutputCompressed, outputCompressed);
    job.setJobName("GridmixWebdatascan." + size);
    ControlledJob cjob = new ControlledJob(job, null);
    gridmix.addJob(cjob);
  } catch (Exception ex) {
    ex.printStackTrace();
  }
}
Project: mapreduce-fork    File: GridMixRunner.java
public void addJob(int numReducers, boolean mapoutputCompressed,
    boolean outputCompressed, Size size, JobControl gridmix) {
  final String prop = String.format("webdataSort.%sJobs.inputFiles", size);
  final String indir = getInputDirsFor(prop, size.defaultPath(VARCOMPSEQ));
  final String outdir = 
    addTSSuffix("perf-out/webdata-sort-out-dir-" + size);

  StringBuffer sb = new StringBuffer();
  sb.append("-keepmap 100 ");
  sb.append("-keepred 100 ");
  sb.append("-inFormat org.apache.hadoop.mapreduce." +
    "lib.input.SequenceFileInputFormat ");
  sb.append("-outFormat org.apache.hadoop.mapreduce." +
    "lib.output.SequenceFileOutputFormat ");
  sb.append("-outKey org.apache.hadoop.io.Text ");
  sb.append("-outValue org.apache.hadoop.io.Text ");
  sb.append("-indir ").append(indir).append(" ");
  sb.append("-outdir ").append(outdir).append(" ");
  sb.append("-r ").append(numReducers);

  String[] args = sb.toString().split(" ");
  clearDir(outdir);
  try {
    Job job = GenericMRLoadJobCreator.createJob(
        args, mapoutputCompressed, outputCompressed);
    job.setJobName("GridmixWebdataSort." + size);
    ControlledJob cjob = new ControlledJob(job, null);
    gridmix.addJob(cjob);
  } catch (Exception ex) {
    ex.printStackTrace();
  }
}
Project: hadoop    File: Job.java
/** 
 * Construct a job.
 * @param jobConf a mapred job configuration representing a job to be executed.
 * @param dependingJobs a list of jobs the current job depends on
 */
@SuppressWarnings("unchecked")
public Job(JobConf jobConf, ArrayList<?> dependingJobs) throws IOException {
  super(org.apache.hadoop.mapreduce.Job.getInstance(jobConf),
        (List<ControlledJob>) dependingJobs);
}
Project: aliyun-oss-hadoop-fs    File: Job.java
/** 
 * Construct a job.
 * @param jobConf a mapred job configuration representing a job to be executed.
 * @param dependingJobs a list of jobs the current job depends on
 */
@SuppressWarnings("unchecked")
public Job(JobConf jobConf, ArrayList<?> dependingJobs) throws IOException {
  super(org.apache.hadoop.mapreduce.Job.getInstance(jobConf),
        (List<ControlledJob>) dependingJobs);
}
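In both copies of this constructor the cast is unchecked: an ArrayList holding anything other than ControlledJob instances is accepted here and fails with a ClassCastException only when the list is later read. The raw ArrayList<?> signature is kept for compatibility with the old mapred API.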