Java 类org.apache.hadoop.mapreduce.jobhistory.TaskFailedEvent 实例源码

项目:hadoop    文件:TaskImpl.java   
/**
 * Builds a {@link TaskFailedEvent} for the job-history log from the task's
 * current state.
 *
 * @param task      the task that failed or was killed
 * @param diag      diagnostic messages joined into the event's error string;
 *                  may be null
 * @param taskState terminal internal state recorded in the event
 * @param taId      the attempt blamed for the failure, or null if none
 * @return the populated history event
 */
private static TaskFailedEvent createTaskFailedEvent(TaskImpl task, List<String> diag, TaskStateInternal taskState, TaskAttemptId taId) {
  StringBuilder errorSb = new StringBuilder();
  if (diag != null) {
    // Join with ", " between entries. The previous code appended the
    // separator before every entry, producing a stray leading ", ".
    String sep = "";
    for (String d : diag) {
      errorSb.append(sep).append(d);
      sep = ", ";
    }
  }
  TaskFailedEvent taskFailedEvent = new TaskFailedEvent(
      TypeConverter.fromYarn(task.taskId),
      // Hack since getFinishTime needs isFinished to be true and that
      // doesn't happen till after the transition.
      task.getFinishTime(taId),
      TypeConverter.fromYarn(task.getType()),
      errorSb.toString(),
      taskState.toString(),
      taId == null ? null : TypeConverter.fromYarn(taId),
      task.getCounters());
  return taskFailedEvent;
}
项目:hadoop    文件:TaskImpl.java   
@Override
public TaskStateInternal transition(TaskImpl task, TaskEvent event) {
  TaskAttemptId attemptId = ((TaskTAttemptEvent) event).getTaskAttemptID();
  task.handleTaskAttemptCompletion(attemptId, taCompletionEventStatus);
  task.finishedAttempts.add(attemptId);
  // Some attempts have not reported in yet: remain in the current state.
  if (task.finishedAttempts.size() != task.attempts.size()) {
    return task.getInternalState();
  }
  // Every attempt is done: log the history event (only if a start event was
  // ever generated) and move the task to its terminal state.
  if (task.historyTaskStartGenerated) {
    TaskFailedEvent failedEvent =
        createTaskFailedEvent(task, null, finalState, null);
    task.eventHandler.handle(
        new JobHistoryEvent(task.taskId.getJobId(), failedEvent));
  } else {
    LOG.debug("Not generating HistoryFinish event since start event not" +
        " generated for task: " + task.getID());
  }
  task.eventHandler.handle(
      new JobTaskEvent(task.taskId, getExternalState(finalState)));
  return finalState;
}
项目:hadoop    文件:TaskImpl.java   
@Override
public void transition(TaskImpl task, TaskEvent event) {
  // Emit the history finish event only if a matching start event exists.
  if (task.historyTaskStartGenerated) {
    TaskFailedEvent failedEvent = createTaskFailedEvent(task, null,
        TaskStateInternal.KILLED, null);
    task.eventHandler.handle(
        new JobHistoryEvent(task.taskId.getJobId(), failedEvent));
  } else {
    LOG.debug("Not generating HistoryFinish event since start event not" +
        " generated for task: " + task.getID());
  }
  // Tell the job the task is killed, then close out waiting-task metrics.
  task.eventHandler.handle(new JobTaskEvent(task.taskId,
      getExternalState(TaskStateInternal.KILLED)));
  task.metrics.endWaitingTask(task);
}
项目:hadoop    文件:JobBuilder.java   
/**
 * Absorbs a {@link TaskFailedEvent} from the history stream into the
 * corresponding {@link ParsedTask}: finish time, status, diagnostics, the
 * blamed attempt id, and counters.
 *
 * @param event the task-failed history event to process
 */
private void processTaskFailedEvent(TaskFailedEvent event) {
  ParsedTask task =
      getOrMakeTask(event.getTaskType(), event.getTaskId().toString(), false);
  if (task == null) {
    // Unknown or filtered-out task: nothing to record.
    return;
  }
  task.setFinishTime(event.getFinishTime());
  task.setTaskStatus(getPre21Value(event.getTaskStatus()));
  TaskFailed t = (TaskFailed)(event.getDatum());
  task.putDiagnosticInfo(t.error.toString());
  task.putFailedDueToAttemptId(t.failedDueToAttempt.toString());
  // Reuse the datum already cast above instead of casting a second time.
  org.apache.hadoop.mapreduce.jobhistory.JhCounters counters = t.counters;
  task.incorporateCounters(
      counters == null ? EMPTY_COUNTERS : counters);
}
项目:aliyun-oss-hadoop-fs    文件:TaskImpl.java   
/**
 * Builds a {@link TaskFailedEvent} for the job-history log from the task's
 * current state.
 *
 * @param task      the task that failed or was killed
 * @param diag      diagnostic messages joined into the event's error string;
 *                  may be null
 * @param taskState terminal internal state recorded in the event
 * @param taId      the attempt blamed for the failure, or null if none
 * @return the populated history event
 */
private static TaskFailedEvent createTaskFailedEvent(TaskImpl task, List<String> diag, TaskStateInternal taskState, TaskAttemptId taId) {
  StringBuilder errorSb = new StringBuilder();
  if (diag != null) {
    // Join with ", " between entries. The previous code appended the
    // separator before every entry, producing a stray leading ", ".
    String sep = "";
    for (String d : diag) {
      errorSb.append(sep).append(d);
      sep = ", ";
    }
  }
  TaskFailedEvent taskFailedEvent = new TaskFailedEvent(
      TypeConverter.fromYarn(task.taskId),
      // Hack since getFinishTime needs isFinished to be true and that
      // doesn't happen till after the transition.
      task.getFinishTime(taId),
      TypeConverter.fromYarn(task.getType()),
      errorSb.toString(),
      taskState.toString(),
      taId == null ? null : TypeConverter.fromYarn(taId),
      task.getCounters());
  return taskFailedEvent;
}
项目:aliyun-oss-hadoop-fs    文件:TaskImpl.java   
@Override
public TaskStateInternal transition(TaskImpl task, TaskEvent event) {
  TaskAttemptId attemptId = ((TaskTAttemptEvent) event).getTaskAttemptID();
  task.handleTaskAttemptCompletion(attemptId, taCompletionEventStatus);
  task.finishedAttempts.add(attemptId);
  // Some attempts have not reported in yet: remain in the current state.
  if (task.finishedAttempts.size() != task.attempts.size()) {
    return task.getInternalState();
  }
  // Every attempt is done: log the history event (only if a start event was
  // ever generated) and move the task to its terminal state.
  if (task.historyTaskStartGenerated) {
    TaskFailedEvent failedEvent =
        createTaskFailedEvent(task, null, finalState, null);
    task.eventHandler.handle(
        new JobHistoryEvent(task.taskId.getJobId(), failedEvent));
  } else {
    LOG.debug("Not generating HistoryFinish event since start event not" +
        " generated for task: " + task.getID());
  }
  task.eventHandler.handle(
      new JobTaskEvent(task.taskId, getExternalState(finalState)));
  return finalState;
}
项目:aliyun-oss-hadoop-fs    文件:TaskImpl.java   
@Override
public void transition(TaskImpl task, TaskEvent event) {
  // Emit the history finish event only if a matching start event exists.
  if (task.historyTaskStartGenerated) {
    TaskFailedEvent failedEvent = createTaskFailedEvent(task, null,
        TaskStateInternal.KILLED, null);
    task.eventHandler.handle(
        new JobHistoryEvent(task.taskId.getJobId(), failedEvent));
  } else {
    LOG.debug("Not generating HistoryFinish event since start event not" +
        " generated for task: " + task.getID());
  }
  // Tell the job the task is killed, then close out waiting-task metrics.
  task.eventHandler.handle(new JobTaskEvent(task.taskId,
      getExternalState(TaskStateInternal.KILLED)));
  task.metrics.endWaitingTask(task);
}
项目:aliyun-oss-hadoop-fs    文件:JobBuilder.java   
/**
 * Absorbs a {@link TaskFailedEvent} from the history stream into the
 * corresponding {@link ParsedTask}: finish time, status, diagnostics, the
 * blamed attempt id, and counters.
 *
 * @param event the task-failed history event to process
 */
private void processTaskFailedEvent(TaskFailedEvent event) {
  ParsedTask task =
      getOrMakeTask(event.getTaskType(), event.getTaskId().toString(), false);
  if (task == null) {
    // Unknown or filtered-out task: nothing to record.
    return;
  }
  task.setFinishTime(event.getFinishTime());
  task.setTaskStatus(getPre21Value(event.getTaskStatus()));
  TaskFailed t = (TaskFailed)(event.getDatum());
  task.putDiagnosticInfo(t.error.toString());
  task.putFailedDueToAttemptId(t.failedDueToAttempt.toString());
  // Reuse the datum already cast above instead of casting a second time.
  org.apache.hadoop.mapreduce.jobhistory.JhCounters counters = t.counters;
  task.incorporateCounters(
      counters == null ? EMPTY_COUNTERS : counters);
}
项目:big-c    文件:TaskImpl.java   
/**
 * Builds a {@link TaskFailedEvent} for the job-history log from the task's
 * current state.
 *
 * @param task      the task that failed or was killed
 * @param diag      diagnostic messages joined into the event's error string;
 *                  may be null
 * @param taskState terminal internal state recorded in the event
 * @param taId      the attempt blamed for the failure, or null if none
 * @return the populated history event
 */
private static TaskFailedEvent createTaskFailedEvent(TaskImpl task, List<String> diag, TaskStateInternal taskState, TaskAttemptId taId) {
  StringBuilder errorSb = new StringBuilder();
  if (diag != null) {
    // Join with ", " between entries. The previous code appended the
    // separator before every entry, producing a stray leading ", ".
    String sep = "";
    for (String d : diag) {
      errorSb.append(sep).append(d);
      sep = ", ";
    }
  }
  TaskFailedEvent taskFailedEvent = new TaskFailedEvent(
      TypeConverter.fromYarn(task.taskId),
      // Hack since getFinishTime needs isFinished to be true and that
      // doesn't happen till after the transition.
      task.getFinishTime(taId),
      TypeConverter.fromYarn(task.getType()),
      errorSb.toString(),
      taskState.toString(),
      taId == null ? null : TypeConverter.fromYarn(taId),
      task.getCounters());
  return taskFailedEvent;
}
项目:big-c    文件:TaskImpl.java   
@Override
public TaskStateInternal transition(TaskImpl task, TaskEvent event) {
  TaskAttemptId attemptId = ((TaskTAttemptEvent) event).getTaskAttemptID();
  task.handleTaskAttemptCompletion(attemptId, taCompletionEventStatus);
  task.finishedAttempts.add(attemptId);
  // Some attempts have not reported in yet: remain in the current state.
  if (task.finishedAttempts.size() != task.attempts.size()) {
    return task.getInternalState();
  }
  // Every attempt is done: log the history event (only if a start event was
  // ever generated) and move the task to its terminal state.
  if (task.historyTaskStartGenerated) {
    TaskFailedEvent failedEvent =
        createTaskFailedEvent(task, null, finalState, null);
    task.eventHandler.handle(
        new JobHistoryEvent(task.taskId.getJobId(), failedEvent));
  } else {
    LOG.debug("Not generating HistoryFinish event since start event not" +
        " generated for task: " + task.getID());
  }
  task.eventHandler.handle(
      new JobTaskEvent(task.taskId, getExternalState(finalState)));
  return finalState;
}
项目:big-c    文件:TaskImpl.java   
@Override
public void transition(TaskImpl task, TaskEvent event) {
  // Emit the history finish event only if a matching start event exists.
  if (task.historyTaskStartGenerated) {
    TaskFailedEvent failedEvent = createTaskFailedEvent(task, null,
        TaskStateInternal.KILLED, null);
    task.eventHandler.handle(
        new JobHistoryEvent(task.taskId.getJobId(), failedEvent));
  } else {
    LOG.debug("Not generating HistoryFinish event since start event not" +
        " generated for task: " + task.getID());
  }
  // Tell the job the task is killed, then close out waiting-task metrics.
  task.eventHandler.handle(new JobTaskEvent(task.taskId,
      getExternalState(TaskStateInternal.KILLED)));
  task.metrics.endWaitingTask(task);
}
项目:big-c    文件:JobBuilder.java   
/**
 * Absorbs a {@link TaskFailedEvent} from the history stream into the
 * corresponding {@link ParsedTask}: finish time, status, diagnostics, the
 * blamed attempt id, and counters.
 *
 * @param event the task-failed history event to process
 */
private void processTaskFailedEvent(TaskFailedEvent event) {
  ParsedTask task =
      getOrMakeTask(event.getTaskType(), event.getTaskId().toString(), false);
  if (task == null) {
    // Unknown or filtered-out task: nothing to record.
    return;
  }
  task.setFinishTime(event.getFinishTime());
  task.setTaskStatus(getPre21Value(event.getTaskStatus()));
  TaskFailed t = (TaskFailed)(event.getDatum());
  task.putDiagnosticInfo(t.error.toString());
  task.putFailedDueToAttemptId(t.failedDueToAttempt.toString());
  // Reuse the datum already cast above instead of casting a second time.
  org.apache.hadoop.mapreduce.jobhistory.JhCounters counters = t.counters;
  task.incorporateCounters(
      counters == null ? EMPTY_COUNTERS : counters);
}
项目:hadoop-2.6.0-cdh5.4.3    文件:TaskImpl.java   
/**
 * Builds a {@link TaskFailedEvent} for the job-history log from the task's
 * current state.
 *
 * @param task      the task that failed or was killed
 * @param diag      diagnostic messages joined into the event's error string;
 *                  may be null
 * @param taskState terminal internal state recorded in the event
 * @param taId      the attempt blamed for the failure, or null if none
 * @return the populated history event
 */
private static TaskFailedEvent createTaskFailedEvent(TaskImpl task, List<String> diag, TaskStateInternal taskState, TaskAttemptId taId) {
  StringBuilder errorSb = new StringBuilder();
  if (diag != null) {
    // Join with ", " between entries. The previous code appended the
    // separator before every entry, producing a stray leading ", ".
    String sep = "";
    for (String d : diag) {
      errorSb.append(sep).append(d);
      sep = ", ";
    }
  }
  TaskFailedEvent taskFailedEvent = new TaskFailedEvent(
      TypeConverter.fromYarn(task.taskId),
      // Hack since getFinishTime needs isFinished to be true and that
      // doesn't happen till after the transition.
      task.getFinishTime(taId),
      TypeConverter.fromYarn(task.getType()),
      errorSb.toString(),
      taskState.toString(),
      taId == null ? null : TypeConverter.fromYarn(taId),
      task.getCounters());
  return taskFailedEvent;
}
项目:hadoop-2.6.0-cdh5.4.3    文件:TaskImpl.java   
@Override
public TaskStateInternal transition(TaskImpl task, TaskEvent event) {
  TaskAttemptId attemptId = ((TaskTAttemptEvent) event).getTaskAttemptID();
  task.handleTaskAttemptCompletion(attemptId, taCompletionEventStatus);
  task.finishedAttempts.add(attemptId);
  // Some attempts have not reported in yet: remain in the current state.
  if (task.finishedAttempts.size() != task.attempts.size()) {
    return task.getInternalState();
  }
  // Every attempt is done: log the history event (only if a start event was
  // ever generated) and move the task to its terminal state.
  if (task.historyTaskStartGenerated) {
    TaskFailedEvent failedEvent =
        createTaskFailedEvent(task, null, finalState, null);
    task.eventHandler.handle(
        new JobHistoryEvent(task.taskId.getJobId(), failedEvent));
  } else {
    LOG.debug("Not generating HistoryFinish event since start event not" +
        " generated for task: " + task.getID());
  }
  task.eventHandler.handle(
      new JobTaskEvent(task.taskId, getExternalState(finalState)));
  return finalState;
}
项目:hadoop-2.6.0-cdh5.4.3    文件:TaskImpl.java   
@Override
public void transition(TaskImpl task, TaskEvent event) {
  // Emit the history finish event only if a matching start event exists.
  if (task.historyTaskStartGenerated) {
    TaskFailedEvent failedEvent = createTaskFailedEvent(task, null,
        TaskStateInternal.KILLED, null);
    task.eventHandler.handle(
        new JobHistoryEvent(task.taskId.getJobId(), failedEvent));
  } else {
    LOG.debug("Not generating HistoryFinish event since start event not" +
        " generated for task: " + task.getID());
  }
  // Tell the job the task is killed, then close out waiting-task metrics.
  task.eventHandler.handle(new JobTaskEvent(task.taskId,
      getExternalState(TaskStateInternal.KILLED)));
  task.metrics.endWaitingTask(task);
}
项目:hadoop-2.6.0-cdh5.4.3    文件:JobBuilder.java   
/**
 * Absorbs a {@link TaskFailedEvent} from the history stream into the
 * corresponding {@link ParsedTask}: finish time, status, diagnostics, the
 * blamed attempt id, and counters.
 *
 * @param event the task-failed history event to process
 */
private void processTaskFailedEvent(TaskFailedEvent event) {
  ParsedTask task =
      getOrMakeTask(event.getTaskType(), event.getTaskId().toString(), false);
  if (task == null) {
    // Unknown or filtered-out task: nothing to record.
    return;
  }
  task.setFinishTime(event.getFinishTime());
  task.setTaskStatus(getPre21Value(event.getTaskStatus()));
  TaskFailed t = (TaskFailed)(event.getDatum());
  task.putDiagnosticInfo(t.error.toString());
  task.putFailedDueToAttemptId(t.failedDueToAttempt.toString());
  // Reuse the datum already cast above instead of casting a second time.
  org.apache.hadoop.mapreduce.jobhistory.JhCounters counters = t.counters;
  task.incorporateCounters(
      counters == null ? EMPTY_COUNTERS : counters);
}
项目:hadoop-plus    文件:TaskImpl.java   
/**
 * Builds a {@link TaskFailedEvent} for the job-history log from the task's
 * current state.
 *
 * @param task      the task that failed or was killed
 * @param diag      diagnostic messages joined into the event's error string;
 *                  may be null
 * @param taskState terminal internal state recorded in the event
 * @param taId      the attempt blamed for the failure, or null if none
 * @return the populated history event
 */
private static TaskFailedEvent createTaskFailedEvent(TaskImpl task, List<String> diag, TaskStateInternal taskState, TaskAttemptId taId) {
  StringBuilder errorSb = new StringBuilder();
  if (diag != null) {
    // Join with ", " between entries. The previous code appended the
    // separator before every entry, producing a stray leading ", ".
    String sep = "";
    for (String d : diag) {
      errorSb.append(sep).append(d);
      sep = ", ";
    }
  }
  TaskFailedEvent taskFailedEvent = new TaskFailedEvent(
      TypeConverter.fromYarn(task.taskId),
      // Hack since getFinishTime needs isFinished to be true and that
      // doesn't happen till after the transition.
      task.getFinishTime(taId),
      TypeConverter.fromYarn(task.getType()),
      errorSb.toString(),
      taskState.toString(),
      taId == null ? null : TypeConverter.fromYarn(taId),
      task.getCounters());
  return taskFailedEvent;
}
项目:hadoop-plus    文件:TaskImpl.java   
@Override
public TaskStateInternal transition(TaskImpl task, TaskEvent event) {
  TaskAttemptId attemptId = ((TaskTAttemptEvent) event).getTaskAttemptID();
  task.handleTaskAttemptCompletion(attemptId, taCompletionEventStatus);
  task.finishedAttempts.add(attemptId);
  // Some attempts have not reported in yet: remain in the current state.
  if (task.finishedAttempts.size() != task.attempts.size()) {
    return task.getInternalState();
  }
  // Every attempt is done: log the history event (only if a start event was
  // ever generated) and move the task to its terminal state.
  if (task.historyTaskStartGenerated) {
    TaskFailedEvent failedEvent =
        createTaskFailedEvent(task, null, finalState, null);
    task.eventHandler.handle(
        new JobHistoryEvent(task.taskId.getJobId(), failedEvent));
  } else {
    LOG.debug("Not generating HistoryFinish event since start event not" +
        " generated for task: " + task.getID());
  }
  task.eventHandler.handle(
      new JobTaskEvent(task.taskId, getExternalState(finalState)));
  return finalState;
}
项目:hadoop-plus    文件:TaskImpl.java   
@Override
public void transition(TaskImpl task, TaskEvent event) {
  // Emit the history finish event only if a matching start event exists.
  if (task.historyTaskStartGenerated) {
    TaskFailedEvent failedEvent = createTaskFailedEvent(task, null,
        TaskStateInternal.KILLED, null);
    task.eventHandler.handle(
        new JobHistoryEvent(task.taskId.getJobId(), failedEvent));
  } else {
    LOG.debug("Not generating HistoryFinish event since start event not" +
        " generated for task: " + task.getID());
  }
  // Tell the job the task is killed, then close out waiting-task metrics.
  task.eventHandler.handle(new JobTaskEvent(task.taskId,
      getExternalState(TaskStateInternal.KILLED)));
  task.metrics.endWaitingTask(task);
}
项目:hadoop-plus    文件:JobBuilder.java   
/**
 * Absorbs a {@link TaskFailedEvent} from the history stream into the
 * corresponding {@link ParsedTask}: finish time, status, diagnostics, the
 * blamed attempt id, and counters.
 *
 * @param event the task-failed history event to process
 */
private void processTaskFailedEvent(TaskFailedEvent event) {
  ParsedTask task =
      getOrMakeTask(event.getTaskType(), event.getTaskId().toString(), false);
  if (task == null) {
    // Unknown or filtered-out task: nothing to record.
    return;
  }
  task.setFinishTime(event.getFinishTime());
  task.setTaskStatus(getPre21Value(event.getTaskStatus()));
  TaskFailed t = (TaskFailed)(event.getDatum());
  task.putDiagnosticInfo(t.error.toString());
  task.putFailedDueToAttemptId(t.failedDueToAttempt.toString());
  // Reuse the datum already cast above instead of casting a second time.
  org.apache.hadoop.mapreduce.jobhistory.JhCounters counters = t.counters;
  task.incorporateCounters(
      counters == null ? EMPTY_COUNTERS : counters);
}
项目:FlexMap    文件:TaskImpl.java   
/**
 * Builds a {@link TaskFailedEvent} for the job-history log from the task's
 * current state.
 *
 * @param task      the task that failed or was killed
 * @param diag      diagnostic messages joined into the event's error string;
 *                  may be null
 * @param taskState terminal internal state recorded in the event
 * @param taId      the attempt blamed for the failure, or null if none
 * @return the populated history event
 */
private static TaskFailedEvent createTaskFailedEvent(TaskImpl task, List<String> diag, TaskStateInternal taskState, TaskAttemptId taId) {
  StringBuilder errorSb = new StringBuilder();
  if (diag != null) {
    // Join with ", " between entries. The previous code appended the
    // separator before every entry, producing a stray leading ", ".
    String sep = "";
    for (String d : diag) {
      errorSb.append(sep).append(d);
      sep = ", ";
    }
  }
  TaskFailedEvent taskFailedEvent = new TaskFailedEvent(
      TypeConverter.fromYarn(task.taskId),
      // Hack since getFinishTime needs isFinished to be true and that
      // doesn't happen till after the transition.
      task.getFinishTime(taId),
      TypeConverter.fromYarn(task.getType()),
      errorSb.toString(),
      taskState.toString(),
      taId == null ? null : TypeConverter.fromYarn(taId),
      task.getCounters());
  return taskFailedEvent;
}
项目:FlexMap    文件:TaskImpl.java   
@Override
public TaskStateInternal transition(TaskImpl task, TaskEvent event) {
  TaskAttemptId attemptId = ((TaskTAttemptEvent) event).getTaskAttemptID();
  LOG.info("receive attempt killed from"+task.getID().toString());

  task.handleTaskAttemptCompletion(attemptId, taCompletionEventStatus);
  task.finishedAttempts.add(attemptId);
  // Some attempts have not reported in yet: remain in the current state.
  if (task.finishedAttempts.size() != task.attempts.size()) {
    return task.getInternalState();
  }
  // Every attempt is done: log the history event (only if a start event was
  // ever generated) and move the task to its terminal state.
  if (task.historyTaskStartGenerated) {
    TaskFailedEvent failedEvent =
        createTaskFailedEvent(task, null, finalState, null);
    task.eventHandler.handle(
        new JobHistoryEvent(task.taskId.getJobId(), failedEvent));
  } else {
    LOG.debug("Not generating HistoryFinish event since start event not" +
        " generated for task: " + task.getID());
  }
  task.eventHandler.handle(
      new JobTaskEvent(task.taskId, getExternalState(finalState)));
  return finalState;
}
项目:FlexMap    文件:TaskImpl.java   
@Override
public void transition(TaskImpl task, TaskEvent event) {
  // Emit the history finish event only if a matching start event exists.
  if (task.historyTaskStartGenerated) {
    TaskFailedEvent failedEvent = createTaskFailedEvent(task, null,
        TaskStateInternal.KILLED, null);
    task.eventHandler.handle(
        new JobHistoryEvent(task.taskId.getJobId(), failedEvent));
  } else {
    LOG.debug("Not generating HistoryFinish event since start event not" +
        " generated for task: " + task.getID());
  }
  // Tell the job the task is killed, then close out waiting-task metrics.
  task.eventHandler.handle(new JobTaskEvent(task.taskId,
      getExternalState(TaskStateInternal.KILLED)));
  task.metrics.endWaitingTask(task);
}
项目:hops    文件:TaskImpl.java   
/**
 * Builds a {@link TaskFailedEvent} for the job-history log from the task's
 * current state.
 *
 * @param task      the task that failed or was killed
 * @param diag      diagnostic messages joined into the event's error string;
 *                  may be null
 * @param taskState terminal internal state recorded in the event
 * @param taId      the attempt blamed for the failure, or null if none
 * @return the populated history event
 */
private static TaskFailedEvent createTaskFailedEvent(TaskImpl task, List<String> diag, TaskStateInternal taskState, TaskAttemptId taId) {
  StringBuilder errorSb = new StringBuilder();
  if (diag != null) {
    // Join with ", " between entries. The previous code appended the
    // separator before every entry, producing a stray leading ", ".
    String sep = "";
    for (String d : diag) {
      errorSb.append(sep).append(d);
      sep = ", ";
    }
  }
  TaskFailedEvent taskFailedEvent = new TaskFailedEvent(
      TypeConverter.fromYarn(task.taskId),
      // Hack since getFinishTime needs isFinished to be true and that
      // doesn't happen till after the transition.
      task.getFinishTime(taId),
      TypeConverter.fromYarn(task.getType()),
      errorSb.toString(),
      taskState.toString(),
      taId == null ? null : TypeConverter.fromYarn(taId),
      task.getCounters());
  return taskFailedEvent;
}
项目:hops    文件:TaskImpl.java   
@Override
public TaskStateInternal transition(TaskImpl task, TaskEvent event) {
  TaskAttemptId attemptId = ((TaskTAttemptEvent) event).getTaskAttemptID();
  task.handleTaskAttemptCompletion(attemptId, taCompletionEventStatus);
  task.finishedAttempts.add(attemptId);
  // Some attempts have not reported in yet: remain in the current state.
  if (task.finishedAttempts.size() != task.attempts.size()) {
    return task.getInternalState();
  }
  // Every attempt is done: log the history event (only if a start event was
  // ever generated) and move the task to its terminal state.
  if (task.historyTaskStartGenerated) {
    TaskFailedEvent failedEvent =
        createTaskFailedEvent(task, null, finalState, null);
    task.eventHandler.handle(
        new JobHistoryEvent(task.taskId.getJobId(), failedEvent));
  } else {
    LOG.debug("Not generating HistoryFinish event since start event not" +
        " generated for task: " + task.getID());
  }
  task.eventHandler.handle(
      new JobTaskEvent(task.taskId, getExternalState(finalState)));
  return finalState;
}
项目:hops    文件:TaskImpl.java   
@Override
public void transition(TaskImpl task, TaskEvent event) {
  // Emit the history finish event only if a matching start event exists.
  if (task.historyTaskStartGenerated) {
    TaskFailedEvent failedEvent = createTaskFailedEvent(task, null,
        TaskStateInternal.KILLED, null);
    task.eventHandler.handle(
        new JobHistoryEvent(task.taskId.getJobId(), failedEvent));
  } else {
    LOG.debug("Not generating HistoryFinish event since start event not" +
        " generated for task: " + task.getID());
  }
  // Tell the job the task is killed, then close out waiting-task metrics.
  task.eventHandler.handle(new JobTaskEvent(task.taskId,
      getExternalState(TaskStateInternal.KILLED)));
  task.metrics.endWaitingTask(task);
}
项目:hops    文件:JobBuilder.java   
/**
 * Absorbs a {@link TaskFailedEvent} from the history stream into the
 * corresponding {@link ParsedTask}: finish time, status, diagnostics, the
 * blamed attempt id, and counters.
 *
 * @param event the task-failed history event to process
 */
private void processTaskFailedEvent(TaskFailedEvent event) {
  ParsedTask task =
      getOrMakeTask(event.getTaskType(), event.getTaskId().toString(), false);
  if (task == null) {
    // Unknown or filtered-out task: nothing to record.
    return;
  }
  task.setFinishTime(event.getFinishTime());
  task.setTaskStatus(getPre21Value(event.getTaskStatus()));
  TaskFailed t = (TaskFailed)(event.getDatum());
  task.putDiagnosticInfo(t.error.toString());
  task.putFailedDueToAttemptId(t.failedDueToAttempt.toString());
  // Reuse the datum already cast above instead of casting a second time.
  org.apache.hadoop.mapreduce.jobhistory.JhCounters counters = t.counters;
  task.incorporateCounters(
      counters == null ? EMPTY_COUNTERS : counters);
}
项目:hadoop-TCP    文件:TaskImpl.java   
/**
 * Builds a {@link TaskFailedEvent} for the job-history log from the task's
 * current state.
 *
 * @param task      the task that failed or was killed
 * @param diag      diagnostic messages joined into the event's error string;
 *                  may be null
 * @param taskState terminal internal state recorded in the event
 * @param taId      the attempt blamed for the failure, or null if none
 * @return the populated history event
 */
private static TaskFailedEvent createTaskFailedEvent(TaskImpl task, List<String> diag, TaskStateInternal taskState, TaskAttemptId taId) {
  StringBuilder errorSb = new StringBuilder();
  if (diag != null) {
    // Join with ", " between entries. The previous code appended the
    // separator before every entry, producing a stray leading ", ".
    String sep = "";
    for (String d : diag) {
      errorSb.append(sep).append(d);
      sep = ", ";
    }
  }
  TaskFailedEvent taskFailedEvent = new TaskFailedEvent(
      TypeConverter.fromYarn(task.taskId),
      // Hack since getFinishTime needs isFinished to be true and that
      // doesn't happen till after the transition.
      task.getFinishTime(taId),
      TypeConverter.fromYarn(task.getType()),
      errorSb.toString(),
      taskState.toString(),
      taId == null ? null : TypeConverter.fromYarn(taId),
      task.getCounters());
  return taskFailedEvent;
}
项目:hadoop-TCP    文件:TaskImpl.java   
@Override
public TaskStateInternal transition(TaskImpl task, TaskEvent event) {
  TaskAttemptId attemptId = ((TaskTAttemptEvent) event).getTaskAttemptID();
  task.handleTaskAttemptCompletion(attemptId, taCompletionEventStatus);
  task.finishedAttempts.add(attemptId);
  // Some attempts have not reported in yet: remain in the current state.
  if (task.finishedAttempts.size() != task.attempts.size()) {
    return task.getInternalState();
  }
  // Every attempt is done: log the history event (only if a start event was
  // ever generated) and move the task to its terminal state.
  if (task.historyTaskStartGenerated) {
    TaskFailedEvent failedEvent =
        createTaskFailedEvent(task, null, finalState, null);
    task.eventHandler.handle(
        new JobHistoryEvent(task.taskId.getJobId(), failedEvent));
  } else {
    LOG.debug("Not generating HistoryFinish event since start event not" +
        " generated for task: " + task.getID());
  }
  task.eventHandler.handle(
      new JobTaskEvent(task.taskId, getExternalState(finalState)));
  return finalState;
}
项目:hadoop-TCP    文件:TaskImpl.java   
@Override
public void transition(TaskImpl task, TaskEvent event) {
  // Emit the history finish event only if a matching start event exists.
  if (task.historyTaskStartGenerated) {
    TaskFailedEvent failedEvent = createTaskFailedEvent(task, null,
        TaskStateInternal.KILLED, null);
    task.eventHandler.handle(
        new JobHistoryEvent(task.taskId.getJobId(), failedEvent));
  } else {
    LOG.debug("Not generating HistoryFinish event since start event not" +
        " generated for task: " + task.getID());
  }
  // Tell the job the task is killed, then close out waiting-task metrics.
  task.eventHandler.handle(new JobTaskEvent(task.taskId,
      getExternalState(TaskStateInternal.KILLED)));
  task.metrics.endWaitingTask(task);
}
项目:hadoop-TCP    文件:JobBuilder.java   
/**
 * Absorbs a {@link TaskFailedEvent} from the history stream into the
 * corresponding {@link ParsedTask}: finish time, status, diagnostics, the
 * blamed attempt id, and counters.
 *
 * @param event the task-failed history event to process
 */
private void processTaskFailedEvent(TaskFailedEvent event) {
  ParsedTask task =
      getOrMakeTask(event.getTaskType(), event.getTaskId().toString(), false);
  if (task == null) {
    // Unknown or filtered-out task: nothing to record.
    return;
  }
  task.setFinishTime(event.getFinishTime());
  task.setTaskStatus(getPre21Value(event.getTaskStatus()));
  TaskFailed t = (TaskFailed)(event.getDatum());
  task.putDiagnosticInfo(t.error.toString());
  task.putFailedDueToAttemptId(t.failedDueToAttempt.toString());
  // Reuse the datum already cast above instead of casting a second time.
  org.apache.hadoop.mapreduce.jobhistory.JhCounters counters = t.counters;
  task.incorporateCounters(
      counters == null ? EMPTY_COUNTERS : counters);
}
项目:hardfs    文件:TaskImpl.java   
/**
 * Builds a {@link TaskFailedEvent} for the job-history log from the task's
 * current state.
 *
 * @param task      the task that failed or was killed
 * @param diag      diagnostic messages joined into the event's error string;
 *                  may be null
 * @param taskState terminal internal state recorded in the event
 * @param taId      the attempt blamed for the failure, or null if none
 * @return the populated history event
 */
private static TaskFailedEvent createTaskFailedEvent(TaskImpl task, List<String> diag, TaskStateInternal taskState, TaskAttemptId taId) {
  StringBuilder errorSb = new StringBuilder();
  if (diag != null) {
    // Join with ", " between entries. The previous code appended the
    // separator before every entry, producing a stray leading ", ".
    String sep = "";
    for (String d : diag) {
      errorSb.append(sep).append(d);
      sep = ", ";
    }
  }
  TaskFailedEvent taskFailedEvent = new TaskFailedEvent(
      TypeConverter.fromYarn(task.taskId),
      // Hack since getFinishTime needs isFinished to be true and that
      // doesn't happen till after the transition.
      task.getFinishTime(taId),
      TypeConverter.fromYarn(task.getType()),
      errorSb.toString(),
      taskState.toString(),
      taId == null ? null : TypeConverter.fromYarn(taId),
      task.getCounters());
  return taskFailedEvent;
}
项目:hardfs    文件:TaskImpl.java   
@Override
public TaskStateInternal transition(TaskImpl task, TaskEvent event) {
  TaskAttemptId attemptId = ((TaskTAttemptEvent) event).getTaskAttemptID();
  task.handleTaskAttemptCompletion(attemptId, taCompletionEventStatus);
  task.finishedAttempts.add(attemptId);
  // Some attempts have not reported in yet: remain in the current state.
  if (task.finishedAttempts.size() != task.attempts.size()) {
    return task.getInternalState();
  }
  // Every attempt is done: log the history event (only if a start event was
  // ever generated) and move the task to its terminal state.
  if (task.historyTaskStartGenerated) {
    TaskFailedEvent failedEvent =
        createTaskFailedEvent(task, null, finalState, null);
    task.eventHandler.handle(
        new JobHistoryEvent(task.taskId.getJobId(), failedEvent));
  } else {
    LOG.debug("Not generating HistoryFinish event since start event not" +
        " generated for task: " + task.getID());
  }
  task.eventHandler.handle(
      new JobTaskEvent(task.taskId, getExternalState(finalState)));
  return finalState;
}
项目:hardfs    文件:TaskImpl.java   
@Override
public void transition(TaskImpl task, TaskEvent event) {
  // Emit the history finish event only if a matching start event exists.
  if (task.historyTaskStartGenerated) {
    TaskFailedEvent failedEvent = createTaskFailedEvent(task, null,
        TaskStateInternal.KILLED, null);
    task.eventHandler.handle(
        new JobHistoryEvent(task.taskId.getJobId(), failedEvent));
  } else {
    LOG.debug("Not generating HistoryFinish event since start event not" +
        " generated for task: " + task.getID());
  }
  // Tell the job the task is killed, then close out waiting-task metrics.
  task.eventHandler.handle(new JobTaskEvent(task.taskId,
      getExternalState(TaskStateInternal.KILLED)));
  task.metrics.endWaitingTask(task);
}
项目:hardfs    文件:JobBuilder.java   
/**
 * Absorbs a {@link TaskFailedEvent} from the history stream into the
 * corresponding {@link ParsedTask}: finish time, status, diagnostics, the
 * blamed attempt id, and counters.
 *
 * @param event the task-failed history event to process
 */
private void processTaskFailedEvent(TaskFailedEvent event) {
  ParsedTask task =
      getOrMakeTask(event.getTaskType(), event.getTaskId().toString(), false);
  if (task == null) {
    // Unknown or filtered-out task: nothing to record.
    return;
  }
  task.setFinishTime(event.getFinishTime());
  task.setTaskStatus(getPre21Value(event.getTaskStatus()));
  TaskFailed t = (TaskFailed)(event.getDatum());
  task.putDiagnosticInfo(t.error.toString());
  task.putFailedDueToAttemptId(t.failedDueToAttempt.toString());
  // Reuse the datum already cast above instead of casting a second time.
  org.apache.hadoop.mapreduce.jobhistory.JhCounters counters = t.counters;
  task.incorporateCounters(
      counters == null ? EMPTY_COUNTERS : counters);
}
项目:hadoop-on-lustre2    文件:TaskImpl.java   
/**
 * Builds a {@link TaskFailedEvent} for the job-history log from the task's
 * current state.
 *
 * @param task      the task that failed or was killed
 * @param diag      diagnostic messages joined into the event's error string;
 *                  may be null
 * @param taskState terminal internal state recorded in the event
 * @param taId      the attempt blamed for the failure, or null if none
 * @return the populated history event
 */
private static TaskFailedEvent createTaskFailedEvent(TaskImpl task, List<String> diag, TaskStateInternal taskState, TaskAttemptId taId) {
  StringBuilder errorSb = new StringBuilder();
  if (diag != null) {
    // Join with ", " between entries. The previous code appended the
    // separator before every entry, producing a stray leading ", ".
    String sep = "";
    for (String d : diag) {
      errorSb.append(sep).append(d);
      sep = ", ";
    }
  }
  TaskFailedEvent taskFailedEvent = new TaskFailedEvent(
      TypeConverter.fromYarn(task.taskId),
      // Hack since getFinishTime needs isFinished to be true and that
      // doesn't happen till after the transition.
      task.getFinishTime(taId),
      TypeConverter.fromYarn(task.getType()),
      errorSb.toString(),
      taskState.toString(),
      taId == null ? null : TypeConverter.fromYarn(taId),
      task.getCounters());
  return taskFailedEvent;
}
项目:hadoop-on-lustre2    文件:TaskImpl.java   
@Override
public TaskStateInternal transition(TaskImpl task, TaskEvent event) {
  TaskAttemptId attemptId = ((TaskTAttemptEvent) event).getTaskAttemptID();
  task.handleTaskAttemptCompletion(attemptId, taCompletionEventStatus);
  task.finishedAttempts.add(attemptId);
  // Some attempts have not reported in yet: remain in the current state.
  if (task.finishedAttempts.size() != task.attempts.size()) {
    return task.getInternalState();
  }
  // Every attempt is done: log the history event (only if a start event was
  // ever generated) and move the task to its terminal state.
  if (task.historyTaskStartGenerated) {
    TaskFailedEvent failedEvent =
        createTaskFailedEvent(task, null, finalState, null);
    task.eventHandler.handle(
        new JobHistoryEvent(task.taskId.getJobId(), failedEvent));
  } else {
    LOG.debug("Not generating HistoryFinish event since start event not" +
        " generated for task: " + task.getID());
  }
  task.eventHandler.handle(
      new JobTaskEvent(task.taskId, getExternalState(finalState)));
  return finalState;
}
项目:hadoop-on-lustre2    文件:TaskImpl.java   
@Override
public void transition(TaskImpl task, TaskEvent event) {
  // Emit the history finish event only if a matching start event exists.
  if (task.historyTaskStartGenerated) {
    TaskFailedEvent failedEvent = createTaskFailedEvent(task, null,
        TaskStateInternal.KILLED, null);
    task.eventHandler.handle(
        new JobHistoryEvent(task.taskId.getJobId(), failedEvent));
  } else {
    LOG.debug("Not generating HistoryFinish event since start event not" +
        " generated for task: " + task.getID());
  }
  // Tell the job the task is killed, then close out waiting-task metrics.
  task.eventHandler.handle(new JobTaskEvent(task.taskId,
      getExternalState(TaskStateInternal.KILLED)));
  task.metrics.endWaitingTask(task);
}
项目:hadoop-on-lustre2    文件:JobBuilder.java   
/**
 * Absorbs a {@link TaskFailedEvent} from the history stream into the
 * corresponding {@link ParsedTask}: finish time, status, diagnostics, the
 * blamed attempt id, and counters.
 *
 * @param event the task-failed history event to process
 */
private void processTaskFailedEvent(TaskFailedEvent event) {
  ParsedTask task =
      getOrMakeTask(event.getTaskType(), event.getTaskId().toString(), false);
  if (task == null) {
    // Unknown or filtered-out task: nothing to record.
    return;
  }
  task.setFinishTime(event.getFinishTime());
  task.setTaskStatus(getPre21Value(event.getTaskStatus()));
  TaskFailed t = (TaskFailed)(event.getDatum());
  task.putDiagnosticInfo(t.error.toString());
  task.putFailedDueToAttemptId(t.failedDueToAttempt.toString());
  // Reuse the datum already cast above instead of casting a second time.
  org.apache.hadoop.mapreduce.jobhistory.JhCounters counters = t.counters;
  task.incorporateCounters(
      counters == null ? EMPTY_COUNTERS : counters);
}
项目:hadoop    文件:Task20LineHistoryEventEmitter.java   
/**
 * Emits a {@link TaskFailedEvent} when the parsed pre-0.21 history line
 * describes a finished task that did not succeed; otherwise returns null.
 */
HistoryEvent maybeEmitEvent(ParsedLine line, String taskIDName,
    HistoryEventEmitter thatg) {
  if (taskIDName == null) {
    return null;
  }

  TaskID taskID = TaskID.forName(taskIDName);
  String status = line.get("TASK_STATUS");
  String finishTime = line.get("FINISH_TIME");
  String taskType = line.get("TASK_TYPE");
  String error = line.get("ERROR");

  // A missing finish time means the task has not completed; nothing to emit.
  if (finishTime == null) {
    return null;
  }
  // Failure is signalled by an explicit error or a non-"success" status.
  boolean failed =
      error != null || (status != null && !status.equalsIgnoreCase("success"));
  if (!failed) {
    return null;
  }

  Task20LineHistoryEventEmitter that = (Task20LineHistoryEventEmitter) thatg;
  // Fall back to the task type parsed from the line when the emitter does
  // not already know the original type.
  TaskType originalTaskType = that.originalTaskType != null
      ? that.originalTaskType
      : Version20LogInterfaceUtils.get20TaskType(taskType);

  return new TaskFailedEvent(taskID, Long.parseLong(finishTime),
      originalTaskType, error, status, null);
}