Java class org.apache.hadoop.mapreduce.jobhistory.TaskStartedEvent example source code
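
Before the per-project snippets, here is a minimal, self-contained sketch of constructing a TaskStartedEvent and reading it back. It assumes the four-argument constructor (task ID, start time, task type, split locations) and the getters used in the snippets below; the task ID string and split-locations value are illustrative only.

import org.apache.hadoop.mapreduce.TaskID;
import org.apache.hadoop.mapreduce.TaskType;
import org.apache.hadoop.mapreduce.jobhistory.TaskStartedEvent;

public class TaskStartedEventSketch {
  public static void main(String[] args) {
    // Illustrative task ID in the usual "task_<jtIdentifier>_<jobId>_m_<taskId>" form.
    TaskID taskId = TaskID.forName("task_200707121733_0003_m_000005");

    // Four-argument constructor used throughout the snippets below:
    // task ID, start time in milliseconds, task type, split locations.
    TaskStartedEvent event = new TaskStartedEvent(
        taskId, System.currentTimeMillis(), TaskType.MAP, "host1,host2");

    // Read the fields back, as JobBuilder.processTaskStartedEvent does further down.
    System.out.println(event.getTaskId() + " (" + event.getTaskType() + ") started at "
        + event.getStartTime() + ", splits=" + event.getSplitLocations());
  }
}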

Project: hadoop    File: Task20LineHistoryEventEmitter.java
HistoryEvent maybeEmitEvent(ParsedLine line, String taskIDName,
    HistoryEventEmitter thatg) {
  if (taskIDName == null) {
    return null;
  }

  TaskID taskID = TaskID.forName(taskIDName);

  String taskType = line.get("TASK_TYPE");
  String startTime = line.get("START_TIME");
  String splits = line.get("SPLITS");

  if (startTime != null && taskType != null) {
    Task20LineHistoryEventEmitter that =
        (Task20LineHistoryEventEmitter) thatg;

    that.originalStartTime = Long.parseLong(startTime);
    that.originalTaskType =
        Version20LogInterfaceUtils.get20TaskType(taskType);

    return new TaskStartedEvent(taskID, that.originalStartTime,
        that.originalTaskType, splits);
  }

  return null;
}
Project: hadoop    File: TopologyBuilder.java
/**
 * Process one {@link HistoryEvent}
 * 
 * @param event
 *          The {@link HistoryEvent} to be processed.
 */
public void process(HistoryEvent event) {
  if (event instanceof TaskAttemptFinishedEvent) {
    processTaskAttemptFinishedEvent((TaskAttemptFinishedEvent) event);
  } else if (event instanceof TaskAttemptUnsuccessfulCompletionEvent) {
    processTaskAttemptUnsuccessfulCompletionEvent((TaskAttemptUnsuccessfulCompletionEvent) event);
  } else if (event instanceof TaskStartedEvent) {
    processTaskStartedEvent((TaskStartedEvent) event);
  } else if (event instanceof MapAttemptFinishedEvent) {
    processMapAttemptFinishedEvent((MapAttemptFinishedEvent) event);
  } else if (event instanceof ReduceAttemptFinishedEvent) {
    processReduceAttemptFinishedEvent((ReduceAttemptFinishedEvent) event);
  }

  // I do NOT expect these if statements to be exhaustive.
}
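
A usage sketch for the process method above, assuming TopologyBuilder's no-argument constructor and the same four-argument TaskStartedEvent constructor; the task ID and host names are illustrative.

import org.apache.hadoop.mapreduce.TaskID;
import org.apache.hadoop.mapreduce.TaskType;
import org.apache.hadoop.mapreduce.jobhistory.TaskStartedEvent;
import org.apache.hadoop.tools.rumen.TopologyBuilder;

public class TopologyBuilderSketch {
  public static void main(String[] args) {
    TopologyBuilder topology = new TopologyBuilder();

    // Feed one TaskStartedEvent; process dispatches it to processTaskStartedEvent,
    // so the hosts named in the split locations can contribute to the topology being built.
    TaskID taskId = TaskID.forName("task_200707121733_0003_m_000005");
    topology.process(new TaskStartedEvent(
        taskId, 1450000000000L, TaskType.MAP, "host1.example.com,host2.example.com"));
  }
}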
Project: aliyun-oss-hadoop-fs    File: Task20LineHistoryEventEmitter.java
HistoryEvent maybeEmitEvent(ParsedLine line, String taskIDName,
    HistoryEventEmitter thatg) {
  if (taskIDName == null) {
    return null;
  }

  TaskID taskID = TaskID.forName(taskIDName);

  String taskType = line.get("TASK_TYPE");
  String startTime = line.get("START_TIME");
  String splits = line.get("SPLITS");

  if (startTime != null && taskType != null) {
    Task20LineHistoryEventEmitter that =
        (Task20LineHistoryEventEmitter) thatg;

    that.originalStartTime = Long.parseLong(startTime);
    that.originalTaskType =
        Version20LogInterfaceUtils.get20TaskType(taskType);

    return new TaskStartedEvent(taskID, that.originalStartTime,
        that.originalTaskType, splits);
  }

  return null;
}
Project: aliyun-oss-hadoop-fs    File: TopologyBuilder.java
/**
 * Process one {@link HistoryEvent}
 * 
 * @param event
 *          The {@link HistoryEvent} to be processed.
 */
public void process(HistoryEvent event) {
  if (event instanceof TaskAttemptFinishedEvent) {
    processTaskAttemptFinishedEvent((TaskAttemptFinishedEvent) event);
  } else if (event instanceof TaskAttemptUnsuccessfulCompletionEvent) {
    processTaskAttemptUnsuccessfulCompletionEvent((TaskAttemptUnsuccessfulCompletionEvent) event);
  } else if (event instanceof TaskStartedEvent) {
    processTaskStartedEvent((TaskStartedEvent) event);
  } else if (event instanceof MapAttemptFinishedEvent) {
    processMapAttemptFinishedEvent((MapAttemptFinishedEvent) event);
  } else if (event instanceof ReduceAttemptFinishedEvent) {
    processReduceAttemptFinishedEvent((ReduceAttemptFinishedEvent) event);
  }

  // I do NOT expect these if statements to be exhaustive.
}
Project: big-c    File: Task20LineHistoryEventEmitter.java
HistoryEvent maybeEmitEvent(ParsedLine line, String taskIDName,
    HistoryEventEmitter thatg) {
  if (taskIDName == null) {
    return null;
  }

  TaskID taskID = TaskID.forName(taskIDName);

  String taskType = line.get("TASK_TYPE");
  String startTime = line.get("START_TIME");
  String splits = line.get("SPLITS");

  if (startTime != null && taskType != null) {
    Task20LineHistoryEventEmitter that =
        (Task20LineHistoryEventEmitter) thatg;

    that.originalStartTime = Long.parseLong(startTime);
    that.originalTaskType =
        Version20LogInterfaceUtils.get20TaskType(taskType);

    return new TaskStartedEvent(taskID, that.originalStartTime,
        that.originalTaskType, splits);
  }

  return null;
}
Project: big-c    File: TopologyBuilder.java
/**
 * Process one {@link HistoryEvent}
 * 
 * @param event
 *          The {@link HistoryEvent} to be processed.
 */
public void process(HistoryEvent event) {
  if (event instanceof TaskAttemptFinishedEvent) {
    processTaskAttemptFinishedEvent((TaskAttemptFinishedEvent) event);
  } else if (event instanceof TaskAttemptUnsuccessfulCompletionEvent) {
    processTaskAttemptUnsuccessfulCompletionEvent((TaskAttemptUnsuccessfulCompletionEvent) event);
  } else if (event instanceof TaskStartedEvent) {
    processTaskStartedEvent((TaskStartedEvent) event);
  } else if (event instanceof MapAttemptFinishedEvent) {
    processMapAttemptFinishedEvent((MapAttemptFinishedEvent) event);
  } else if (event instanceof ReduceAttemptFinishedEvent) {
    processReduceAttemptFinishedEvent((ReduceAttemptFinishedEvent) event);
  }

  // I do NOT expect these if statements to be exhaustive.
}
Project: hadoop-2.6.0-cdh5.4.3    File: Task20LineHistoryEventEmitter.java
HistoryEvent maybeEmitEvent(ParsedLine line, String taskIDName,
    HistoryEventEmitter thatg) {
  if (taskIDName == null) {
    return null;
  }

  TaskID taskID = TaskID.forName(taskIDName);

  String taskType = line.get("TASK_TYPE");
  String startTime = line.get("START_TIME");
  String splits = line.get("SPLITS");

  if (startTime != null && taskType != null) {
    Task20LineHistoryEventEmitter that =
        (Task20LineHistoryEventEmitter) thatg;

    that.originalStartTime = Long.parseLong(startTime);
    that.originalTaskType =
        Version20LogInterfaceUtils.get20TaskType(taskType);

    return new TaskStartedEvent(taskID, that.originalStartTime,
        that.originalTaskType, splits);
  }

  return null;
}
Project: hadoop-2.6.0-cdh5.4.3    File: TopologyBuilder.java
/**
 * Process one {@link HistoryEvent}
 * 
 * @param event
 *          The {@link HistoryEvent} to be processed.
 */
public void process(HistoryEvent event) {
  if (event instanceof TaskAttemptFinishedEvent) {
    processTaskAttemptFinishedEvent((TaskAttemptFinishedEvent) event);
  } else if (event instanceof TaskAttemptUnsuccessfulCompletionEvent) {
    processTaskAttemptUnsuccessfulCompletionEvent((TaskAttemptUnsuccessfulCompletionEvent) event);
  } else if (event instanceof TaskStartedEvent) {
    processTaskStartedEvent((TaskStartedEvent) event);
  } else if (event instanceof MapAttemptFinishedEvent) {
    processMapAttemptFinishedEvent((MapAttemptFinishedEvent) event);
  } else if (event instanceof ReduceAttemptFinishedEvent) {
    processReduceAttemptFinishedEvent((ReduceAttemptFinishedEvent) event);
  }

  // I do NOT expect these if statements to be exhaustive.
}
Project: hadoop-plus    File: Task20LineHistoryEventEmitter.java
HistoryEvent maybeEmitEvent(ParsedLine line, String taskIDName,
    HistoryEventEmitter thatg) {
  if (taskIDName == null) {
    return null;
  }

  TaskID taskID = TaskID.forName(taskIDName);

  String taskType = line.get("TASK_TYPE");
  String startTime = line.get("START_TIME");
  String splits = line.get("SPLITS");

  if (startTime != null && taskType != null) {
    Task20LineHistoryEventEmitter that =
        (Task20LineHistoryEventEmitter) thatg;

    that.originalStartTime = Long.parseLong(startTime);
    that.originalTaskType =
        Version20LogInterfaceUtils.get20TaskType(taskType);

    return new TaskStartedEvent(taskID, that.originalStartTime,
        that.originalTaskType, splits);
  }

  return null;
}
Project: hadoop-plus    File: TopologyBuilder.java
/**
 * Process one {@link HistoryEvent}
 * 
 * @param event
 *          The {@link HistoryEvent} to be processed.
 */
public void process(HistoryEvent event) {
  if (event instanceof TaskAttemptFinishedEvent) {
    processTaskAttemptFinishedEvent((TaskAttemptFinishedEvent) event);
  } else if (event instanceof TaskAttemptUnsuccessfulCompletionEvent) {
    processTaskAttemptUnsuccessfulCompletionEvent((TaskAttemptUnsuccessfulCompletionEvent) event);
  } else if (event instanceof TaskStartedEvent) {
    processTaskStartedEvent((TaskStartedEvent) event);
  } else if (event instanceof MapAttemptFinishedEvent) {
    processMapAttemptFinishedEvent((MapAttemptFinishedEvent) event);
  } else if (event instanceof ReduceAttemptFinishedEvent) {
    processReduceAttemptFinishedEvent((ReduceAttemptFinishedEvent) event);
  }

  // I do NOT expect these if statements to be exhaustive.
}
Project: hops    File: Task20LineHistoryEventEmitter.java
HistoryEvent maybeEmitEvent(ParsedLine line, String taskIDName,
    HistoryEventEmitter thatg) {
  if (taskIDName == null) {
    return null;
  }

  TaskID taskID = TaskID.forName(taskIDName);

  String taskType = line.get("TASK_TYPE");
  String startTime = line.get("START_TIME");
  String splits = line.get("SPLITS");

  if (startTime != null && taskType != null) {
    Task20LineHistoryEventEmitter that =
        (Task20LineHistoryEventEmitter) thatg;

    that.originalStartTime = Long.parseLong(startTime);
    that.originalTaskType =
        Version20LogInterfaceUtils.get20TaskType(taskType);

    return new TaskStartedEvent(taskID, that.originalStartTime,
        that.originalTaskType, splits);
  }

  return null;
}
Project: hops    File: TopologyBuilder.java
/**
 * Process one {@link HistoryEvent}
 * 
 * @param event
 *          The {@link HistoryEvent} to be processed.
 */
public void process(HistoryEvent event) {
  if (event instanceof TaskAttemptFinishedEvent) {
    processTaskAttemptFinishedEvent((TaskAttemptFinishedEvent) event);
  } else if (event instanceof TaskAttemptUnsuccessfulCompletionEvent) {
    processTaskAttemptUnsuccessfulCompletionEvent((TaskAttemptUnsuccessfulCompletionEvent) event);
  } else if (event instanceof TaskStartedEvent) {
    processTaskStartedEvent((TaskStartedEvent) event);
  } else if (event instanceof MapAttemptFinishedEvent) {
    processMapAttemptFinishedEvent((MapAttemptFinishedEvent) event);
  } else if (event instanceof ReduceAttemptFinishedEvent) {
    processReduceAttemptFinishedEvent((ReduceAttemptFinishedEvent) event);
  }

  // I do NOT expect these if statements to be exhaustive.
}
Project: hadoop-TCP    File: Task20LineHistoryEventEmitter.java
HistoryEvent maybeEmitEvent(ParsedLine line, String taskIDName,
    HistoryEventEmitter thatg) {
  if (taskIDName == null) {
    return null;
  }

  TaskID taskID = TaskID.forName(taskIDName);

  String taskType = line.get("TASK_TYPE");
  String startTime = line.get("START_TIME");
  String splits = line.get("SPLITS");

  if (startTime != null && taskType != null) {
    Task20LineHistoryEventEmitter that =
        (Task20LineHistoryEventEmitter) thatg;

    that.originalStartTime = Long.parseLong(startTime);
    that.originalTaskType =
        Version20LogInterfaceUtils.get20TaskType(taskType);

    return new TaskStartedEvent(taskID, that.originalStartTime,
        that.originalTaskType, splits);
  }

  return null;
}
Project: hadoop-TCP    File: TopologyBuilder.java
/**
 * Process one {@link HistoryEvent}
 * 
 * @param event
 *          The {@link HistoryEvent} to be processed.
 */
public void process(HistoryEvent event) {
  if (event instanceof TaskAttemptFinishedEvent) {
    processTaskAttemptFinishedEvent((TaskAttemptFinishedEvent) event);
  } else if (event instanceof TaskAttemptUnsuccessfulCompletionEvent) {
    processTaskAttemptUnsuccessfulCompletionEvent((TaskAttemptUnsuccessfulCompletionEvent) event);
  } else if (event instanceof TaskStartedEvent) {
    processTaskStartedEvent((TaskStartedEvent) event);
  } else if (event instanceof MapAttemptFinishedEvent) {
    processMapAttemptFinishedEvent((MapAttemptFinishedEvent) event);
  } else if (event instanceof ReduceAttemptFinishedEvent) {
    processReduceAttemptFinishedEvent((ReduceAttemptFinishedEvent) event);
  }

  // I do NOT expect these if statements to be exhaustive.
}
Project: hardfs    File: Task20LineHistoryEventEmitter.java
HistoryEvent maybeEmitEvent(ParsedLine line, String taskIDName,
    HistoryEventEmitter thatg) {
  if (taskIDName == null) {
    return null;
  }

  TaskID taskID = TaskID.forName(taskIDName);

  String taskType = line.get("TASK_TYPE");
  String startTime = line.get("START_TIME");
  String splits = line.get("SPLITS");

  if (startTime != null && taskType != null) {
    Task20LineHistoryEventEmitter that =
        (Task20LineHistoryEventEmitter) thatg;

    that.originalStartTime = Long.parseLong(startTime);
    that.originalTaskType =
        Version20LogInterfaceUtils.get20TaskType(taskType);

    return new TaskStartedEvent(taskID, that.originalStartTime,
        that.originalTaskType, splits);
  }

  return null;
}
Project: hardfs    File: TopologyBuilder.java
/**
 * Process one {@link HistoryEvent}
 * 
 * @param event
 *          The {@link HistoryEvent} to be processed.
 */
public void process(HistoryEvent event) {
  if (event instanceof TaskAttemptFinishedEvent) {
    processTaskAttemptFinishedEvent((TaskAttemptFinishedEvent) event);
  } else if (event instanceof TaskAttemptUnsuccessfulCompletionEvent) {
    processTaskAttemptUnsuccessfulCompletionEvent((TaskAttemptUnsuccessfulCompletionEvent) event);
  } else if (event instanceof TaskStartedEvent) {
    processTaskStartedEvent((TaskStartedEvent) event);
  } else if (event instanceof MapAttemptFinishedEvent) {
    processMapAttemptFinishedEvent((MapAttemptFinishedEvent) event);
  } else if (event instanceof ReduceAttemptFinishedEvent) {
    processReduceAttemptFinishedEvent((ReduceAttemptFinishedEvent) event);
  }

  // I do NOT expect these if statements to be exhaustive.
}
Project: hadoop-on-lustre2    File: Task20LineHistoryEventEmitter.java
HistoryEvent maybeEmitEvent(ParsedLine line, String taskIDName,
    HistoryEventEmitter thatg) {
  if (taskIDName == null) {
    return null;
  }

  TaskID taskID = TaskID.forName(taskIDName);

  String taskType = line.get("TASK_TYPE");
  String startTime = line.get("START_TIME");
  String splits = line.get("SPLITS");

  if (startTime != null && taskType != null) {
    Task20LineHistoryEventEmitter that =
        (Task20LineHistoryEventEmitter) thatg;

    that.originalStartTime = Long.parseLong(startTime);
    that.originalTaskType =
        Version20LogInterfaceUtils.get20TaskType(taskType);

    return new TaskStartedEvent(taskID, that.originalStartTime,
        that.originalTaskType, splits);
  }

  return null;
}
Project: hadoop-on-lustre2    File: TopologyBuilder.java
/**
 * Process one {@link HistoryEvent}
 * 
 * @param event
 *          The {@link HistoryEvent} to be processed.
 */
public void process(HistoryEvent event) {
  if (event instanceof TaskAttemptFinishedEvent) {
    processTaskAttemptFinishedEvent((TaskAttemptFinishedEvent) event);
  } else if (event instanceof TaskAttemptUnsuccessfulCompletionEvent) {
    processTaskAttemptUnsuccessfulCompletionEvent((TaskAttemptUnsuccessfulCompletionEvent) event);
  } else if (event instanceof TaskStartedEvent) {
    processTaskStartedEvent((TaskStartedEvent) event);
  } else if (event instanceof MapAttemptFinishedEvent) {
    processMapAttemptFinishedEvent((MapAttemptFinishedEvent) event);
  } else if (event instanceof ReduceAttemptFinishedEvent) {
    processReduceAttemptFinishedEvent((ReduceAttemptFinishedEvent) event);
  }

  // I do NOT expect these if statements to be exhaustive.
}
Project: mapreduce-fork    File: Task20LineHistoryEventEmitter.java
HistoryEvent maybeEmitEvent(ParsedLine line, String taskIDName,
    HistoryEventEmitter thatg) {
  if (taskIDName == null) {
    return null;
  }

  TaskID taskID = TaskID.forName(taskIDName);

  String taskType = line.get("TASK_TYPE");
  String startTime = line.get("START_TIME");
  String splits = line.get("SPLITS");

  if (startTime != null && taskType != null) {
    Task20LineHistoryEventEmitter that =
        (Task20LineHistoryEventEmitter) thatg;

    that.originalStartTime = Long.parseLong(startTime);
    that.originalTaskType =
        Version20LogInterfaceUtils.get20TaskType(taskType);

    return new TaskStartedEvent(taskID, that.originalStartTime,
        that.originalTaskType, splits);
  }

  return null;
}
Project: hadoop    File: TaskImpl.java
private void sendTaskStartedEvent() {
  TaskStartedEvent tse = new TaskStartedEvent(
      TypeConverter.fromYarn(taskId), getLaunchTime(),
      TypeConverter.fromYarn(taskId.getTaskType()),
      getSplitsAsString());
  eventHandler
      .handle(new JobHistoryEvent(taskId.getJobId(), tse));
  historyTaskStartGenerated = true;
}
Project: hadoop    File: JobBuilder.java
private void processTaskStartedEvent(TaskStartedEvent event) {
  ParsedTask task =
      getOrMakeTask(event.getTaskType(), event.getTaskId().toString(), true);
  task.setStartTime(event.getStartTime());
  task.setPreferredLocations(preferredLocationForSplits(event
      .getSplitLocations()));
}
Project: aliyun-oss-hadoop-fs    File: TaskImpl.java
private void sendTaskStartedEvent() {
  TaskStartedEvent tse = new TaskStartedEvent(
      TypeConverter.fromYarn(taskId), getLaunchTime(),
      TypeConverter.fromYarn(taskId.getTaskType()),
      getSplitsAsString());
  eventHandler
      .handle(new JobHistoryEvent(taskId.getJobId(), tse));
  historyTaskStartGenerated = true;
}
Project: aliyun-oss-hadoop-fs    File: JobBuilder.java
private void processTaskStartedEvent(TaskStartedEvent event) {
  ParsedTask task =
      getOrMakeTask(event.getTaskType(), event.getTaskId().toString(), true);
  task.setStartTime(event.getStartTime());
  task.setPreferredLocations(preferredLocationForSplits(event
      .getSplitLocations()));
}
Project: big-c    File: TaskImpl.java
private void sendTaskStartedEvent() {
  TaskStartedEvent tse = new TaskStartedEvent(
      TypeConverter.fromYarn(taskId), getLaunchTime(),
      TypeConverter.fromYarn(taskId.getTaskType()),
      getSplitsAsString());
  eventHandler
      .handle(new JobHistoryEvent(taskId.getJobId(), tse));
  historyTaskStartGenerated = true;
}
Project: big-c    File: JobBuilder.java
private void processTaskStartedEvent(TaskStartedEvent event) {
  ParsedTask task =
      getOrMakeTask(event.getTaskType(), event.getTaskId().toString(), true);
  task.setStartTime(event.getStartTime());
  task.setPreferredLocations(preferredLocationForSplits(event
      .getSplitLocations()));
}
Project: hadoop-2.6.0-cdh5.4.3    File: TaskImpl.java
private void sendTaskStartedEvent() {
  TaskStartedEvent tse = new TaskStartedEvent(
      TypeConverter.fromYarn(taskId), getLaunchTime(),
      TypeConverter.fromYarn(taskId.getTaskType()),
      getSplitsAsString());
  eventHandler
      .handle(new JobHistoryEvent(taskId.getJobId(), tse));
  historyTaskStartGenerated = true;
}
Project: hadoop-2.6.0-cdh5.4.3    File: JobBuilder.java
private void processTaskStartedEvent(TaskStartedEvent event) {
  ParsedTask task =
      getOrMakeTask(event.getTaskType(), event.getTaskId().toString(), true);
  task.setStartTime(event.getStartTime());
  task.setPreferredLocations(preferredLocationForSplits(event
      .getSplitLocations()));
}
Project: hadoop-plus    File: TaskImpl.java
private void sendTaskStartedEvent() {
  TaskStartedEvent tse = new TaskStartedEvent(
      TypeConverter.fromYarn(taskId), getLaunchTime(),
      TypeConverter.fromYarn(taskId.getTaskType()),
      getSplitsAsString());
  eventHandler
      .handle(new JobHistoryEvent(taskId.getJobId(), tse));
  historyTaskStartGenerated = true;
}
Project: hadoop-plus    File: JobBuilder.java
private void processTaskStartedEvent(TaskStartedEvent event) {
  ParsedTask task =
      getOrMakeTask(event.getTaskType(), event.getTaskId().toString(), true);
  task.setStartTime(event.getStartTime());
  task.setPreferredLocations(preferredLocationForSplits(event
      .getSplitLocations()));
}
Project: FlexMap    File: TaskImpl.java
private void sendTaskStartedEvent() {
  TaskStartedEvent tse = new TaskStartedEvent(
      TypeConverter.fromYarn(taskId), getLaunchTime(),
      TypeConverter.fromYarn(taskId.getTaskType()),
      getSplitsAsString());
  eventHandler
      .handle(new JobHistoryEvent(taskId.getJobId(), tse));
  historyTaskStartGenerated = true;
}
Project: hops    File: TaskImpl.java
private void sendTaskStartedEvent() {
  TaskStartedEvent tse = new TaskStartedEvent(
      TypeConverter.fromYarn(taskId), getLaunchTime(),
      TypeConverter.fromYarn(taskId.getTaskType()),
      getSplitsAsString());
  eventHandler
      .handle(new JobHistoryEvent(taskId.getJobId(), tse));
  historyTaskStartGenerated = true;
}
Project: hops    File: JobBuilder.java
private void processTaskStartedEvent(TaskStartedEvent event) {
  ParsedTask task =
      getOrMakeTask(event.getTaskType(), event.getTaskId().toString(), true);
  task.setStartTime(event.getStartTime());
  task.setPreferredLocations(preferredLocationForSplits(event
      .getSplitLocations()));
}
Project: hadoop-TCP    File: TaskImpl.java
private void sendTaskStartedEvent() {
  TaskStartedEvent tse = new TaskStartedEvent(
      TypeConverter.fromYarn(taskId), getLaunchTime(),
      TypeConverter.fromYarn(taskId.getTaskType()),
      getSplitsAsString());
  eventHandler
      .handle(new JobHistoryEvent(taskId.getJobId(), tse));
  historyTaskStartGenerated = true;
}
Project: hadoop-TCP    File: JobBuilder.java
private void processTaskStartedEvent(TaskStartedEvent event) {
  ParsedTask task =
      getOrMakeTask(event.getTaskType(), event.getTaskId().toString(), true);
  task.setStartTime(event.getStartTime());
  task.setPreferredLocations(preferredLocationForSplits(event
      .getSplitLocations()));
}
Project: hardfs    File: TaskImpl.java
private void sendTaskStartedEvent() {
  TaskStartedEvent tse = new TaskStartedEvent(
      TypeConverter.fromYarn(taskId), getLaunchTime(),
      TypeConverter.fromYarn(taskId.getTaskType()),
      getSplitsAsString());
  eventHandler
      .handle(new JobHistoryEvent(taskId.getJobId(), tse));
  historyTaskStartGenerated = true;
}
Project: hardfs    File: JobBuilder.java
private void processTaskStartedEvent(TaskStartedEvent event) {
  ParsedTask task =
      getOrMakeTask(event.getTaskType(), event.getTaskId().toString(), true);
  task.setStartTime(event.getStartTime());
  task.setPreferredLocations(preferredLocationForSplits(event
      .getSplitLocations()));
}
Project: hadoop-on-lustre2    File: TaskImpl.java
private void sendTaskStartedEvent() {
  TaskStartedEvent tse = new TaskStartedEvent(
      TypeConverter.fromYarn(taskId), getLaunchTime(),
      TypeConverter.fromYarn(taskId.getTaskType()),
      getSplitsAsString());
  eventHandler
      .handle(new JobHistoryEvent(taskId.getJobId(), tse));
  historyTaskStartGenerated = true;
}
Project: hadoop-on-lustre2    File: JobBuilder.java
private void processTaskStartedEvent(TaskStartedEvent event) {
  ParsedTask task =
      getOrMakeTask(event.getTaskType(), event.getTaskId().toString(), true);
  task.setStartTime(event.getStartTime());
  task.setPreferredLocations(preferredLocationForSplits(event
      .getSplitLocations()));
}
Project: mapreduce-fork    File: TopologyBuilder.java
/**
 * Process one {@link HistoryEvent}
 * 
 * @param event
 *          The {@link HistoryEvent} to be processed.
 */
public void process(HistoryEvent event) {
  if (event instanceof TaskAttemptFinishedEvent) {
    processTaskAttemptFinishedEvent((TaskAttemptFinishedEvent) event);
  } else if (event instanceof TaskAttemptUnsuccessfulCompletionEvent) {
    processTaskAttemptUnsuccessfulCompletionEvent((TaskAttemptUnsuccessfulCompletionEvent) event);
  } else if (event instanceof TaskStartedEvent) {
    processTaskStartedEvent((TaskStartedEvent) event);
  }

  // I do NOT expect these if statements to be exhaustive.
}
Project: mapreduce-fork    File: JobBuilder.java
private void processTaskStartedEvent(TaskStartedEvent event) {
  LoggedTask task =
      getOrMakeTask(event.getTaskType(), event.getTaskId().toString(), true);
  task.setStartTime(event.getStartTime());
  task.setPreferredLocations(preferredLocationForSplits(event
      .getSplitLocations()));
}