@Override
public LogParams getLogFileParams(JobID jobID, TaskAttemptID taskAttemptID)
    throws IOException {
  try {
    return clientCache.getClient(jobID).getLogFilePath(jobID, taskAttemptID);
  } catch (YarnException e) {
    throw new IOException(e);
  }
}
@Override
public LogParams getLogFileParams(JobID jobID, TaskAttemptID taskAttemptID)
    throws IOException {
  return clientCache.getClient(jobID).getLogFilePath(jobID, taskAttemptID);
}
public LogParams getLogFilePath(JobID oldJobID, TaskAttemptID oldTaskAttemptID)
    throws IOException {
  org.apache.hadoop.mapreduce.v2.api.records.JobId jobId =
      TypeConverter.toYarn(oldJobID);
  GetJobReportRequest request =
      recordFactory.newRecordInstance(GetJobReportRequest.class);
  request.setJobId(jobId);
  JobReport report =
      ((GetJobReportResponse) invoke("getJobReport",
          GetJobReportRequest.class, request)).getJobReport();
  if (EnumSet.of(JobState.SUCCEEDED, JobState.FAILED, JobState.KILLED,
      JobState.ERROR).contains(report.getJobState())) {
    if (oldTaskAttemptID != null) {
      // Task-attempt logs: resolve the attempt's container and node manager.
      GetTaskAttemptReportRequest taRequest =
          recordFactory.newRecordInstance(GetTaskAttemptReportRequest.class);
      taRequest.setTaskAttemptId(TypeConverter.toYarn(oldTaskAttemptID));
      TaskAttemptReport taReport =
          ((GetTaskAttemptReportResponse) invoke("getTaskAttemptReport",
              GetTaskAttemptReportRequest.class, taRequest))
              .getTaskAttemptReport();
      if (taReport.getContainerId() == null
          || taReport.getNodeManagerHost() == null) {
        throw new IOException("Unable to get log information for task: "
            + oldTaskAttemptID);
      }
      return new LogParams(
          taReport.getContainerId().toString(),
          taReport.getContainerId().getApplicationAttemptId()
              .getApplicationId().toString(),
          NodeId.newInstance(taReport.getNodeManagerHost(),
              taReport.getNodeManagerPort()).toString(),
          report.getUser());
    } else {
      // AM logs: use the container of the most recent application attempt.
      if (report.getAMInfos() == null || report.getAMInfos().size() == 0) {
        throw new IOException("Unable to get log information for job: "
            + oldJobID);
      }
      AMInfo amInfo = report.getAMInfos().get(report.getAMInfos().size() - 1);
      return new LogParams(
          amInfo.getContainerId().toString(),
          amInfo.getAppAttemptId().getApplicationId().toString(),
          NodeId.newInstance(amInfo.getNodeManagerHost(),
              amInfo.getNodeManagerPort()).toString(),
          report.getUser());
    }
  } else {
    throw new IOException("Cannot get log path for an in-progress job");
  }
}
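For context, a minimal sketch of how the LogParams returned above might be consumed. The class and method names here are hypothetical, added only for illustration; the LogParams getters mirror the constructor arguments used in the method above (container id, application id, node id, owner).

// Hypothetical caller, not part of the snippets above.
import java.io.IOException;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.protocol.ClientProtocol;
import org.apache.hadoop.mapreduce.v2.LogParams;

public class LogParamsExample {
  // Prints where the aggregated logs for a finished job or attempt live.
  // "proto" stands in for any ClientProtocol implementation (e.g. YARNRunner).
  static void printLogLocation(ClientProtocol proto, JobID jobId,
      TaskAttemptID attemptId) throws IOException, InterruptedException {
    // Per the method above, only completed jobs (SUCCEEDED/FAILED/KILLED/ERROR)
    // yield log params; in-progress jobs cause an IOException.
    LogParams params = proto.getLogFileParams(jobId, attemptId);
    System.out.println("application: " + params.getApplicationId());
    System.out.println("container:   " + params.getContainerId());
    System.out.println("node:        " + params.getNodeId());
    System.out.println("owner:       " + params.getOwner());
  }
}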
@Override
public LogParams getLogFileParams(org.apache.hadoop.mapreduce.JobID jobID,
    org.apache.hadoop.mapreduce.TaskAttemptID taskAttemptID)
    throws IOException, InterruptedException {
  throw new UnsupportedOperationException("Not supported");
}
/** {@inheritDoc} */
@Override
public LogParams getLogFileParams(JobID jobID, TaskAttemptID taskAttemptID)
    throws IOException, InterruptedException {
  return null;
}
@Override
public LogParams getLogFileParams(JobID jobID, TaskAttemptID taskAttemptID)
    throws IOException, InterruptedException {
  return backupRunner.getLogFileParams(jobID, taskAttemptID);
}
public LogParams getLogFilePath(JobID oldJobID, TaskAttemptID oldTaskAttemptID)
    throws YarnException, IOException {
  // FIXME logs for an attempt?
  throw new UnsupportedOperationException();
}
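Taken together, the implementations above expose three distinct failure modes: a null return (the {@inheritDoc} variant), an UnsupportedOperationException (the unimplemented backends), and an IOException (in-progress jobs or missing report data). A defensive caller might normalize all three, as in this sketch; the helper name is an assumption, grounded only in the behaviors shown above.

// Hypothetical helper, not part of the snippets above.
import java.io.IOException;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.protocol.ClientProtocol;
import org.apache.hadoop.mapreduce.v2.LogParams;

public class SafeLogParamsFetcher {
  /** Returns log params, or null when the backend cannot provide them. */
  static LogParams tryGetLogFileParams(ClientProtocol proto, JobID jobId,
      TaskAttemptID attemptId) throws InterruptedException {
    try {
      return proto.getLogFileParams(jobId, attemptId); // may itself be null
    } catch (UnsupportedOperationException | IOException e) {
      // Backend has no log information for this job/attempt.
      return null;
    }
  }
}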