public StubbedJob(JobId jobId, ApplicationAttemptId applicationAttemptId,
    Configuration conf, EventHandler eventHandler, boolean newApiCommitter,
    String user, int numSplits, AppContext appContext) {
  super(jobId, applicationAttemptId, conf, eventHandler,
      null, new JobTokenSecretManager(), new Credentials(),
      new SystemClock(), Collections.<TaskId, TaskInfo> emptyMap(),
      MRAppMetrics.create(), null, newApiCommitter, user,
      System.currentTimeMillis(), null, appContext, null, null);

  initTransition = getInitTransition(numSplits);
  localFactory = stateMachineFactory.addTransition(JobStateInternal.NEW,
      EnumSet.of(JobStateInternal.INITED, JobStateInternal.FAILED),
      JobEventType.JOB_INIT,
      // This is abusive.
      initTransition);

  // This "this leak" is okay because the retained pointer is in an
  // instance variable.
  localStateMachine = localFactory.make(this);
}
private void loadAllTasks() {
  if (tasksLoaded.get()) {
    return;
  }
  tasksLock.lock();
  try {
    if (tasksLoaded.get()) {
      return;
    }
    for (Map.Entry<TaskID, TaskInfo> entry :
        jobInfo.getAllTasks().entrySet()) {
      TaskId yarnTaskID = TypeConverter.toYarn(entry.getKey());
      TaskInfo taskInfo = entry.getValue();
      Task task = new CompletedTask(yarnTaskID, taskInfo);
      tasks.put(yarnTaskID, task);
      if (task.getType() == TaskType.MAP) {
        mapTasks.put(task.getID(), task);
      } else if (task.getType() == TaskType.REDUCE) {
        reduceTasks.put(task.getID(), task);
      }
    }
    tasksLoaded.set(true);
  } finally {
    tasksLock.unlock();
  }
}
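// NOTE: the following is a minimal, self-contained sketch (not part of the
// Hadoop sources) of the lazy-load pattern that loadAllTasks() above relies
// on: an AtomicBoolean flag checked before and after taking a ReentrantLock,
// so the expensive load runs exactly once and later callers return on the
// fast path. The class and method names here are illustrative only.
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.locks.ReentrantLock;

class LazyLoadSketch {
  private final AtomicBoolean loaded = new AtomicBoolean(false);
  private final ReentrantLock lock = new ReentrantLock();

  void ensureLoaded() {
    if (loaded.get()) {        // fast path: already populated
      return;
    }
    lock.lock();
    try {
      if (loaded.get()) {      // re-check: another thread may have finished
        return;                // the load while we waited for the lock
      }
      load();                  // expensive, one-time work
      loaded.set(true);        // publish only after the load completes
    } finally {
      lock.unlock();
    }
  }

  private void load() {
    // e.g. parse a job history file and index its tasks
  }
}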
private long computeFinishedMaps(JobInfo jobInfo, int numMaps,
    int numSuccessfulMaps) {
  // Fast path: if every map succeeded, trust the summary counter.
  if (numMaps == numSuccessfulMaps) {
    return jobInfo.getFinishedMaps();
  }

  // Otherwise recount from the per-task records. getAllTasks() returns
  // reduce tasks as well, so count only map tasks that succeeded.
  long numFinishedMaps = 0;
  Map<org.apache.hadoop.mapreduce.TaskID, TaskInfo> taskInfos =
      jobInfo.getAllTasks();
  for (TaskInfo taskInfo : taskInfos.values()) {
    if (org.apache.hadoop.mapreduce.TaskType.MAP == taskInfo.getTaskType()
        && TaskState.SUCCEEDED.toString().equals(taskInfo.getTaskStatus())) {
      ++numFinishedMaps;
    }
  }
  return numFinishedMaps;
}
private TimelineEntity createTaskEntity(TaskInfo taskInfo) {
  TimelineEntity task = new TimelineEntity();
  task.setEntityType(TASK);
  task.setEntityId(taskInfo.getTaskId().toString());
  task.setStartTime(taskInfo.getStartTime());

  task.addOtherInfo("START_TIME", taskInfo.getStartTime());
  task.addOtherInfo("FINISH_TIME", taskInfo.getFinishTime());
  task.addOtherInfo("TASK_TYPE", taskInfo.getTaskType());
  task.addOtherInfo("TASK_STATUS", taskInfo.getTaskStatus());
  task.addOtherInfo("ERROR_INFO", taskInfo.getError());

  LOG.info("converted task " + taskInfo.getTaskId() +
      " to a timeline entity");
  return task;
}
public StubbedJob(JobId jobId, ApplicationAttemptId applicationAttemptId,
    Configuration conf, EventHandler eventHandler, boolean newApiCommitter,
    String user, int numSplits) {
  super(jobId, applicationAttemptId, conf, eventHandler,
      null, new JobTokenSecretManager(), new Credentials(),
      new SystemClock(), Collections.<TaskId, TaskInfo> emptyMap(),
      MRAppMetrics.create(), null, newApiCommitter, user,
      System.currentTimeMillis(), null, null, null, null);

  initTransition = getInitTransition(numSplits);
  localFactory = stateMachineFactory.addTransition(JobStateInternal.NEW,
      EnumSet.of(JobStateInternal.INITED, JobStateInternal.FAILED),
      JobEventType.JOB_INIT,
      // This is abusive.
      initTransition);

  // This "this leak" is okay because the retained pointer is in an
  // instance variable.
  localStateMachine = localFactory.make(this);
}
public TaskRecoverEvent(TaskId taskID, TaskInfo taskInfo,
    OutputCommitter committer, boolean recoverTaskOutput) {
  super(taskID, TaskEventType.T_RECOVER);
  this.taskInfo = taskInfo;
  this.committer = committer;
  this.recoverTaskOutput = recoverTaskOutput;
}
protected void scheduleTasks(Set<TaskId> taskIDs,
    boolean recoverTaskOutput) {
  for (TaskId taskID : taskIDs) {
    TaskInfo taskInfo = completedTasksFromPreviousRun.remove(taskID);
    if (taskInfo != null) {
      // The task completed in the previous application attempt:
      // recover it instead of rerunning it from scratch.
      eventHandler.handle(new TaskRecoverEvent(taskID, taskInfo,
          committer, recoverTaskOutput));
    } else {
      eventHandler.handle(new TaskEvent(taskID, TaskEventType.T_SCHEDULE));
    }
  }
}
private void printTasks(TaskType taskType, String status) {
  Map<TaskID, JobHistoryParser.TaskInfo> tasks = job.getAllTasks();
  StringBuffer header = new StringBuffer();
  header.append("\n").append(status).append(" ");
  header.append(taskType).append(" task list for ").append(jobId);
  header.append("\nTaskId\t\tStartTime\tFinishTime\tError");
  if (TaskType.MAP.equals(taskType)) {
    header.append("\tInputSplits");
  }
  header.append("\n====================================================");

  StringBuffer taskList = new StringBuffer();
  for (JobHistoryParser.TaskInfo task : tasks.values()) {
    if (taskType.equals(task.getTaskType()) &&
        (status.equals(task.getTaskStatus()) ||
         status.equalsIgnoreCase("ALL"))) {
      taskList.setLength(0);
      taskList.append(task.getTaskId());
      taskList.append("\t").append(StringUtils.getFormattedTimeWithDiff(
          dateFormat, task.getStartTime(), 0));
      taskList.append("\t").append(StringUtils.getFormattedTimeWithDiff(
          dateFormat, task.getFinishTime(), task.getStartTime()));
      taskList.append("\t").append(task.getError());
      if (TaskType.MAP.equals(taskType)) {
        taskList.append("\t").append(task.getSplitLocations());
      }
      System.out.println(header.toString());
      System.out.println(taskList.toString());
    }
  }
}
/** Apply the filter (status) on the parsed job and generate summary */
public FilteredJob(JobInfo job, String status) {
  filter = status;

  Map<TaskID, JobHistoryParser.TaskInfo> tasks = job.getAllTasks();
  for (JobHistoryParser.TaskInfo task : tasks.values()) {
    Map<TaskAttemptID, JobHistoryParser.TaskAttemptInfo> attempts =
        task.getAllTaskAttempts();
    for (JobHistoryParser.TaskAttemptInfo attempt : attempts.values()) {
      if (attempt.getTaskStatus().equals(status)) {
        String hostname = attempt.getHostname();
        TaskID id = attempt.getAttemptId().getTaskID();

        Set<TaskID> set = badNodesToFilteredTasks.get(hostname);
        if (set == null) {
          set = new TreeSet<TaskID>();
          badNodesToFilteredTasks.put(hostname, set);
        }
        set.add(id);
      }
    }
  }
}
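// NOTE: a self-contained sketch (not part of the Hadoop sources) of the same
// "group task ids by hostname" step done in FilteredJob above, written with
// Map.computeIfAbsent (Java 8+), which avoids the explicit null check on the
// per-host set. Class, field, and method names here are illustrative only.
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;

class GroupByHostSketch {
  private final Map<String, Set<String>> badNodesToTasks = new HashMap<>();

  void record(String hostname, String taskId) {
    // Create the sorted per-host set on first use, then add the task id.
    badNodesToTasks
        .computeIfAbsent(hostname, h -> new TreeSet<String>())
        .add(taskId);
  }
}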
private Set<TimelineEntity> createTaskAndTaskAttemptEntities(JobInfo jobInfo) {
  Set<TimelineEntity> entities = new HashSet<>();
  Map<TaskID, TaskInfo> taskInfoMap = jobInfo.getAllTasks();
  LOG.info("job " + jobInfo.getJobId() + " has " + taskInfoMap.size() +
      " tasks");
  for (TaskInfo taskInfo : taskInfoMap.values()) {
    TimelineEntity task = createTaskEntity(taskInfo);
    entities.add(task);
    // add the task attempts from this task
    Set<TimelineEntity> taskAttempts = createTaskAttemptEntities(taskInfo);
    entities.addAll(taskAttempts);
  }
  return entities;
}
private Set<TimelineEntity> createTaskAttemptEntities(TaskInfo taskInfo) {
  Set<TimelineEntity> taskAttempts = new HashSet<TimelineEntity>();
  Map<TaskAttemptID, TaskAttemptInfo> taskAttemptInfoMap =
      taskInfo.getAllTaskAttempts();
  LOG.info("task " + taskInfo.getTaskId() + " has " +
      taskAttemptInfoMap.size() + " task attempts");
  for (TaskAttemptInfo taskAttemptInfo : taskAttemptInfoMap.values()) {
    TimelineEntity taskAttempt = createTaskAttemptEntity(taskAttemptInfo);
    taskAttempts.add(taskAttempt);
  }
  return taskAttempts;
}