Java Class org.apache.hadoop.mapreduce.TaskCompletionEvent Example Source Code

Project: incubator-gobblin    File: CompactionAvroJobConfigurator.java
private static List<TaskCompletionEvent> getAllTaskCompletionEvent(Job completedJob) {
  List<TaskCompletionEvent> completionEvents = new LinkedList<>();

  // Page through the events: getTaskCompletionEvents(startFrom) returns them in batches.
  while (true) {
    try {
      TaskCompletionEvent[] bunchOfEvents;
      bunchOfEvents = completedJob.getTaskCompletionEvents(completionEvents.size());
      if (bunchOfEvents == null || bunchOfEvents.length == 0) {
        break;
      }
      completionEvents.addAll(Arrays.asList(bunchOfEvents));
    } catch (IOException e) {
      // On an I/O error, stop paging and return whatever was collected so far.
      break;
    }
  }
  }

  return completionEvents;
}
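
The helper above pages through completion events in batches until the job stops returning them. As a usage sketch, here is how a caller might derive the unsuccessful attempts from that list; `completedJob` is an assumed handle, and the filter uses the public `TaskCompletionEvent.Status` enum (the actual Gobblin helper `getUnsuccessfulTaskCompletionEvent`, referenced below, is not shown in this listing and may differ):

// Usage sketch (assumes a completed Job handle named completedJob).
// Status is the public enum on org.apache.hadoop.mapreduce.TaskCompletionEvent.
List<TaskCompletionEvent> allEvents = getAllTaskCompletionEvent(completedJob);
List<TaskCompletionEvent> unsuccessful = new ArrayList<>();
for (TaskCompletionEvent event : allEvents) {
  if (event.getStatus() != TaskCompletionEvent.Status.SUCCEEDED) {
    unsuccessful.add(event);  // FAILED, KILLED, OBSOLETE or TIPFAILED
  }
}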
Project: incubator-gobblin    File: CompactionAvroJobConfigurator.java
/**
 * Remove all bad paths caused by speculative execution.
 * The problem happens when a speculative task attempt is initialized but then killed in the
 * middle of processing: a partial file is left at
 * {tmp_output}/_temporary/1/_temporary/attempt_xxx_xxx/part-m-xxxx.avro without ever being
 * committed to its final destination at {tmp_output}/part-m-xxxx.avro.
 *
 * @param job completed MR job
 * @param tmpPath temporary output directory to scan
 * @param fs file system the paths reside on
 * @return all successful paths
 */
public static List<Path> removeFailedPaths(Job job, Path tmpPath, FileSystem fs) throws IOException {
  List<TaskCompletionEvent> failedEvents = CompactionAvroJobConfigurator.getUnsuccessfulTaskCompletionEvent(job);

  List<Path> allFilePaths = DatasetHelper.getApplicableFilePaths(fs, tmpPath, Lists.newArrayList("avro"));
  List<Path> goodPaths = new ArrayList<>();
  for (Path filePath: allFilePaths) {
    if (CompactionAvroJobConfigurator.isFailedPath(filePath, failedEvents)) {
      fs.delete(filePath, false);
      log.error("{} is a bad path so it was deleted", filePath);
    } else {
      goodPaths.add(filePath);
    }
  }

  return goodPaths;
}
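
`removeFailedPaths` depends on two helpers that this listing does not include: `getUnsuccessfulTaskCompletionEvent` and `isFailedPath`. A plausible sketch of the latter, assuming it merely checks whether the file's path embeds the attempt id of an unsuccessful attempt; the real Gobblin implementation may differ:

// Hypothetical reconstruction, not the actual Gobblin code: a path is treated as
// failed if it lies under the attempt directory of any unsuccessful task attempt.
private static boolean isFailedPath(Path path, List<TaskCompletionEvent> failedEvents) {
  for (TaskCompletionEvent event : failedEvents) {
    if (path.toString().contains(event.getTaskAttemptId().toString())) {
      return true;
    }
  }
  return false;
}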
Project: hadoop    File: CLI.java
/**
 * List the events for the given job.
 * @param job the job whose events to list
 * @param fromEventId event id to start listing from
 * @param numEvents maximum number of events to list
 * @throws IOException
 * @throws InterruptedException
 */
private void listEvents(Job job, int fromEventId, int numEvents)
    throws IOException, InterruptedException {
  TaskCompletionEvent[] events = job.
    getTaskCompletionEvents(fromEventId, numEvents);
  System.out.println("Task completion events for " + job.getJobID());
  System.out.println("Number of events (from " + fromEventId + ") are: " 
    + events.length);
  for(TaskCompletionEvent event: events) {
    System.out.println(event.getStatus() + " " + 
      event.getTaskAttemptId() + " " + 
      getTaskLogURL(event.getTaskAttemptId(), event.getTaskTrackerHttp()));
  }
}
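
This method backs the `mapred job -events <job-id> <from-event-id> <#-of-events>` subcommand, but the underlying call is public API, so events can also be listed from a standalone client. A minimal sketch, assuming a reachable cluster configuration; the job id string is a placeholder:

// Standalone sketch using only public org.apache.hadoop.mapreduce API.
// The job id below is a placeholder; substitute a real one.
Configuration conf = new Configuration();
Cluster cluster = new Cluster(conf);
Job job = cluster.getJob(JobID.forName("job_1400000000000_0001"));
if (job != null) {
  for (TaskCompletionEvent event : job.getTaskCompletionEvents(0, 10)) {
    System.out.println(event.getStatus() + " " + event.getTaskAttemptId());
  }
}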
Project: aliyun-oss-hadoop-fs    File: CLI.java
/**
 * List the events for the given job.
 * @param job the job whose events to list
 * @param fromEventId event id to start listing from
 * @param numEvents maximum number of events to list
 * @throws IOException
 * @throws InterruptedException
 */
private void listEvents(Job job, int fromEventId, int numEvents)
    throws IOException, InterruptedException {
  TaskCompletionEvent[] events = job.
    getTaskCompletionEvents(fromEventId, numEvents);
  System.out.println("Task completion events for " + job.getJobID());
  System.out.println("Number of events (from " + fromEventId + ") are: " 
    + events.length);
  for(TaskCompletionEvent event: events) {
    System.out.println(event.getStatus() + " " + 
      event.getTaskAttemptId() + " " + 
      getTaskLogURL(event.getTaskAttemptId(), event.getTaskTrackerHttp()));
  }
}
Project: big-c    File: CLI.java
/**
 * List the events for the given job.
 * @param job the job whose events to list
 * @param fromEventId event id to start listing from
 * @param numEvents maximum number of events to list
 * @throws IOException
 * @throws InterruptedException
 */
private void listEvents(Job job, int fromEventId, int numEvents)
    throws IOException, InterruptedException {
  TaskCompletionEvent[] events = job.
    getTaskCompletionEvents(fromEventId, numEvents);
  System.out.println("Task completion events for " + job.getJobID());
  System.out.println("Number of events (from " + fromEventId + ") are: " 
    + events.length);
  for(TaskCompletionEvent event: events) {
    System.out.println(event.getStatus() + " " + 
      event.getTaskAttemptId() + " " + 
      getTaskLogURL(event.getTaskAttemptId(), event.getTaskTrackerHttp()));
  }
}
Project: hadoop-2.6.0-cdh5.4.3    File: CLI.java
/**
 * List the events for the given job.
 * @param job the job whose events to list
 * @param fromEventId event id to start listing from
 * @param numEvents maximum number of events to list
 * @throws IOException
 * @throws InterruptedException
 */
private void listEvents(Job job, int fromEventId, int numEvents)
    throws IOException, InterruptedException {
  TaskCompletionEvent[] events = job.
    getTaskCompletionEvents(fromEventId, numEvents);
  System.out.println("Task completion events for " + job.getJobID());
  System.out.println("Number of events (from " + fromEventId + ") are: " 
    + events.length);
  for(TaskCompletionEvent event: events) {
    System.out.println(event.getStatus() + " " + 
      event.getTaskAttemptId() + " " + 
      getTaskLogURL(event.getTaskAttemptId(), event.getTaskTrackerHttp()));
  }
}
Project: hadoop-plus    File: CLI.java
/**
 * List the events for the given job.
 * @param job the job whose events to list
 * @param fromEventId event id to start listing from
 * @param numEvents maximum number of events to list
 * @throws IOException
 * @throws InterruptedException
 */
private void listEvents(Job job, int fromEventId, int numEvents)
    throws IOException, InterruptedException {
  TaskCompletionEvent[] events = job.
    getTaskCompletionEvents(fromEventId, numEvents);
  System.out.println("Task completion events for " + job.getJobID());
  System.out.println("Number of events (from " + fromEventId + ") are: " 
    + events.length);
  for(TaskCompletionEvent event: events) {
    System.out.println(event.getStatus() + " " + 
      event.getTaskAttemptId() + " " + 
      getTaskLogURL(event.getTaskAttemptId(), event.getTaskTrackerHttp()));
  }
}
Project: FlexMap    File: CLI.java
/**
 * List the events for the given job.
 * @param job the job whose events to list
 * @param fromEventId event id to start listing from
 * @param numEvents maximum number of events to list
 * @throws IOException
 * @throws InterruptedException
 */
private void listEvents(Job job, int fromEventId, int numEvents)
    throws IOException, InterruptedException {
  TaskCompletionEvent[] events = job.
    getTaskCompletionEvents(fromEventId, numEvents);
  System.out.println("Task completion events for " + job.getJobID());
  System.out.println("Number of events (from " + fromEventId + ") are: " 
    + events.length);
  for(TaskCompletionEvent event: events) {
    System.out.println(event.getStatus() + " " + 
      event.getTaskAttemptId() + " " + 
      getTaskLogURL(event.getTaskAttemptId(), event.getTaskTrackerHttp()));
  }
}
Project: hServer    File: HServerClientProtocol.java
@Override
public TaskCompletionEvent[] getTaskCompletionEvents(JobID jobID, int i, int i2) throws IOException, InterruptedException {
    if (submittedJobs.containsKey(org.apache.hadoop.mapred.JobID.downgrade(jobID))) {
        return new TaskCompletionEvent[0];
    } else {
        return backupRunner.getTaskCompletionEvents(jobID, i, i2);
    }
}
Project: hops    File: CLI.java
/**
 * List the events for the given job.
 * @param job the job whose events to list
 * @param fromEventId event id to start listing from
 * @param numEvents maximum number of events to list
 * @throws IOException
 * @throws InterruptedException
 */
private void listEvents(Job job, int fromEventId, int numEvents)
    throws IOException, InterruptedException {
  TaskCompletionEvent[] events = job.
    getTaskCompletionEvents(fromEventId, numEvents);
  System.out.println("Task completion events for " + job.getJobID());
  System.out.println("Number of events (from " + fromEventId + ") are: " 
    + events.length);
  for(TaskCompletionEvent event: events) {
    System.out.println(event.getStatus() + " " + 
      event.getTaskAttemptId() + " " + 
      getTaskLogURL(event.getTaskAttemptId(), event.getTaskTrackerHttp()));
  }
}
Project: hadoop-TCP    File: CLI.java
/**
 * List the events for the given job.
 * @param job the job whose events to list
 * @param fromEventId event id to start listing from
 * @param numEvents maximum number of events to list
 * @throws IOException
 * @throws InterruptedException
 */
private void listEvents(Job job, int fromEventId, int numEvents)
    throws IOException, InterruptedException {
  TaskCompletionEvent[] events = job.
    getTaskCompletionEvents(fromEventId, numEvents);
  System.out.println("Task completion events for " + job.getJobID());
  System.out.println("Number of events (from " + fromEventId + ") are: " 
    + events.length);
  for(TaskCompletionEvent event: events) {
    System.out.println(event.getStatus() + " " + 
      event.getTaskAttemptId() + " " + 
      getTaskLogURL(event.getTaskAttemptId(), event.getTaskTrackerHttp()));
  }
}
Project: hardfs    File: CLI.java
/**
 * List the events for the given job.
 * @param job the job whose events to list
 * @param fromEventId event id to start listing from
 * @param numEvents maximum number of events to list
 * @throws IOException
 * @throws InterruptedException
 */
private void listEvents(Job job, int fromEventId, int numEvents)
    throws IOException, InterruptedException {
  TaskCompletionEvent[] events = job.
    getTaskCompletionEvents(fromEventId, numEvents);
  System.out.println("Task completion events for " + job.getJobID());
  System.out.println("Number of events (from " + fromEventId + ") are: " 
    + events.length);
  for(TaskCompletionEvent event: events) {
    System.out.println(event.getStatus() + " " + 
      event.getTaskAttemptId() + " " + 
      getTaskLogURL(event.getTaskAttemptId(), event.getTaskTrackerHttp()));
  }
}
Project: hadoop-on-lustre2    File: CLI.java
/**
 * List the events for the given job.
 * @param job the job whose events to list
 * @param fromEventId event id to start listing from
 * @param numEvents maximum number of events to list
 * @throws IOException
 * @throws InterruptedException
 */
private void listEvents(Job job, int fromEventId, int numEvents)
    throws IOException, InterruptedException {
  TaskCompletionEvent[] events = job.
    getTaskCompletionEvents(fromEventId, numEvents);
  System.out.println("Task completion events for " + job.getJobID());
  System.out.println("Number of events (from " + fromEventId + ") are: " 
    + events.length);
  for(TaskCompletionEvent event: events) {
    System.out.println(event.getStatus() + " " + 
      event.getTaskAttemptId() + " " + 
      getTaskLogURL(event.getTaskAttemptId(), event.getTaskTrackerHttp()));
  }
}
Project: incubator-tez    File: ClientServiceDelegate.java
public TaskCompletionEvent[] getTaskCompletionEvents(JobID jobId,
    int fromEventId, int maxEvents)
    throws IOException, InterruptedException {
  // FIXME seems like there is support in client to query task failure
  // related information
  // However, api does not make sense for DAG
  return new TaskCompletionEvent[0];
}
Project: mapreduce-fork    File: CLI.java
/**
 * List the events for the given job.
 * @param job the job whose events to list
 * @param fromEventId event id to start listing from
 * @param numEvents maximum number of events to list
 * @throws IOException
 * @throws InterruptedException
 */
private void listEvents(Job job, int fromEventId, int numEvents)
    throws IOException, InterruptedException {
  TaskCompletionEvent[] events = job.
    getTaskCompletionEvents(fromEventId, numEvents);
  System.out.println("Task completion events for " + job.getJobID());
  System.out.println("Number of events (from " + fromEventId + ") are: " 
    + events.length);
  for(TaskCompletionEvent event: events) {
    System.out.println(event.getStatus() + " " + 
      event.getTaskAttemptId() + " " + 
      getTaskLogURL(event.getTaskAttemptId(), event.getTaskTrackerHttp()));
  }
}
Project: tez    File: ClientServiceDelegate.java
public TaskCompletionEvent[] getTaskCompletionEvents(JobID jobId,
    int fromEventId, int maxEvents)
    throws IOException, InterruptedException {
  // FIXME seems like there is support in client to query task failure
  // related information
  // However, api does not make sense for DAG
  return new TaskCompletionEvent[0];
}
Project: big_data    File: YARNRunner.java
@Override
public TaskCompletionEvent[] getTaskCompletionEvents(JobID arg0, int arg1, int arg2)
        throws IOException, InterruptedException {
    return clientCache.getClient(arg0).getTaskCompletionEvents(arg0, arg1, arg2);
}
Project: hadoop    File: YARNRunner.java
@Override
public TaskCompletionEvent[] getTaskCompletionEvents(JobID arg0, int arg1,
    int arg2) throws IOException, InterruptedException {
  return clientCache.getClient(arg0).getTaskCompletionEvents(arg0, arg1, arg2);
}
Project: hadoop    File: LocalJobRunner.java
public MapTaskCompletionEventsUpdate getMapCompletionEvents(JobID jobId, 
    int fromEventId, int maxLocs, TaskAttemptID id) throws IOException {
  return new MapTaskCompletionEventsUpdate(
    org.apache.hadoop.mapred.TaskCompletionEvent.EMPTY_ARRAY, false);
}
Project: hadoop    File: LocalJobRunner.java
public TaskCompletionEvent[] getTaskCompletionEvents(
    org.apache.hadoop.mapreduce.JobID jobid
    , int fromEventId, int maxEvents) throws IOException {
  return TaskCompletionEvent.EMPTY_ARRAY;
}
Project: aliyun-oss-hadoop-fs    File: YARNRunner.java
@Override
public TaskCompletionEvent[] getTaskCompletionEvents(JobID arg0, int arg1,
    int arg2) throws IOException, InterruptedException {
  return clientCache.getClient(arg0).getTaskCompletionEvents(arg0, arg1, arg2);
}
Project: aliyun-oss-hadoop-fs    File: LocalJobRunner.java
@Override
public MapTaskCompletionEventsUpdate getMapCompletionEvents(JobID jobId, 
    int fromEventId, int maxLocs, TaskAttemptID id) throws IOException {
  return new MapTaskCompletionEventsUpdate(
    org.apache.hadoop.mapred.TaskCompletionEvent.EMPTY_ARRAY, false);
}
Project: aliyun-oss-hadoop-fs    File: LocalJobRunner.java
public TaskCompletionEvent[] getTaskCompletionEvents(
    org.apache.hadoop.mapreduce.JobID jobid
    , int fromEventId, int maxEvents) throws IOException {
  return TaskCompletionEvent.EMPTY_ARRAY;
}
Project: big-c    File: YARNRunner.java
@Override
public TaskCompletionEvent[] getTaskCompletionEvents(JobID arg0, int arg1,
    int arg2) throws IOException, InterruptedException {
  return clientCache.getClient(arg0).getTaskCompletionEvents(arg0, arg1, arg2);
}
Project: big-c    File: LocalJobRunner.java
public MapTaskCompletionEventsUpdate getMapCompletionEvents(JobID jobId, 
    int fromEventId, int maxLocs, TaskAttemptID id) throws IOException {
  return new MapTaskCompletionEventsUpdate(
    org.apache.hadoop.mapred.TaskCompletionEvent.EMPTY_ARRAY, false);
}
Project: big-c    File: LocalJobRunner.java
public TaskCompletionEvent[] getTaskCompletionEvents(
    org.apache.hadoop.mapreduce.JobID jobid
    , int fromEventId, int maxEvents) throws IOException {
  return TaskCompletionEvent.EMPTY_ARRAY;
}
Project: hadoop-2.6.0-cdh5.4.3    File: YARNRunner.java
@Override
public TaskCompletionEvent[] getTaskCompletionEvents(JobID arg0, int arg1,
    int arg2) throws IOException, InterruptedException {
  return clientCache.getClient(arg0).getTaskCompletionEvents(arg0, arg1, arg2);
}
Project: hadoop-2.6.0-cdh5.4.3    File: LocalJobRunner.java
public MapTaskCompletionEventsUpdate getMapCompletionEvents(JobID jobId, 
    int fromEventId, int maxLocs, TaskAttemptID id) throws IOException {
  return new MapTaskCompletionEventsUpdate(
    org.apache.hadoop.mapred.TaskCompletionEvent.EMPTY_ARRAY, false);
}
Project: hadoop-2.6.0-cdh5.4.3    File: LocalJobRunner.java
public TaskCompletionEvent[] getTaskCompletionEvents(
    org.apache.hadoop.mapreduce.JobID jobid
    , int fromEventId, int maxEvents) throws IOException {
  return TaskCompletionEvent.EMPTY_ARRAY;
}
Project: hadoop-plus    File: YARNRunner.java
@Override
public TaskCompletionEvent[] getTaskCompletionEvents(JobID arg0, int arg1,
    int arg2) throws IOException, InterruptedException {
  return clientCache.getClient(arg0).getTaskCompletionEvents(arg0, arg1, arg2);
}
Project: hadoop-plus    File: LocalJobRunner.java
public MapTaskCompletionEventsUpdate getMapCompletionEvents(JobID jobId, 
    int fromEventId, int maxLocs, TaskAttemptID id) throws IOException {
  return new MapTaskCompletionEventsUpdate(
    org.apache.hadoop.mapred.TaskCompletionEvent.EMPTY_ARRAY, false);
}
Project: hadoop-plus    File: LocalJobRunner.java
public TaskCompletionEvent[] getTaskCompletionEvents(
    org.apache.hadoop.mapreduce.JobID jobid
    , int fromEventId, int maxEvents) throws IOException {
  return TaskCompletionEvent.EMPTY_ARRAY;
}
Project: FlexMap    File: YARNRunner.java
@Override
public TaskCompletionEvent[] getTaskCompletionEvents(JobID arg0, int arg1,
    int arg2) throws IOException, InterruptedException {
  return clientCache.getClient(arg0).getTaskCompletionEvents(arg0, arg1, arg2);
}
Project: FlexMap    File: LocalJobRunner.java
public MapTaskCompletionEventsUpdate getMapCompletionEvents(JobID jobId, 
    int fromEventId, int maxLocs, TaskAttemptID id) throws IOException {
  return new MapTaskCompletionEventsUpdate(
    org.apache.hadoop.mapred.TaskCompletionEvent.EMPTY_ARRAY, false);
}
Project: FlexMap    File: LocalJobRunner.java
public TaskCompletionEvent[] getTaskCompletionEvents(
    org.apache.hadoop.mapreduce.JobID jobid
    , int fromEventId, int maxEvents) throws IOException {
  return TaskCompletionEvent.EMPTY_ARRAY;
}
Project: ignite    File: HadoopClientProtocol.java
/** {@inheritDoc} */
@Override public TaskCompletionEvent[] getTaskCompletionEvents(JobID jobid, int fromEventId, int maxEvents)
    throws IOException, InterruptedException {
    return new TaskCompletionEvent[0];
}
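
Note that several implementations above never report events at all: hServer for its own submitted jobs, Tez's ClientServiceDelegate, the LocalJobRunner stubs, and Ignite's HadoopClientProtocol all return an empty array unconditionally. Portable client code should therefore treat an empty result as "no event information", not as evidence that no tasks ran. A small defensive sketch, assuming a `job` handle as in the CLI examples:

// Defensive handling: an empty array may mean "no events yet" or
// "this runtime never reports events" (LocalJobRunner, Tez, Ignite, ...).
TaskCompletionEvent[] events = job.getTaskCompletionEvents(0, 100);
if (events.length == 0) {
  System.out.println("No task completion events reported; "
      + "fall back to counters or the job history server.");
}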