Example source code for the Java class org.apache.hadoop.mapreduce.v2.api.records.JobId

The snippets below are collected from the Apache Hadoop project and show how a JobId is created, converted between the classic and YARN-side APIs, and consumed by tests and services.

Project: hadoop    File: TestAMWebServicesAttempts.java
@Test
public void testTaskAttemptsSlash() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    for (Task task : jobsMap.get(id).getTasks().values()) {

      String tid = MRApps.toString(task.getID());
      ClientResponse response = r.path("ws").path("v1").path("mapreduce")
          .path("jobs").path(jobId).path("tasks").path(tid).path("attempts/")
          .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
      JSONObject json = response.getEntity(JSONObject.class);
      verifyAMTaskAttempts(json, task);
    }
  }
}
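Most of the web-service tests collected here follow the same pattern: enumerate the jobs from the AppContext, render each JobId as a string with MRApps.toString, and use that string as a path segment of the REST request. A minimal standalone sketch of just the conversion step (the cluster timestamp and sequence numbers are made-up values for illustration):

import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils;

public class JobIdToPathSegment {
  public static void main(String[] args) {
    // Illustrative timestamp and ids only.
    JobId id = MRBuilderUtils.newJobId(1234567890000L, 1, 1);
    String jobId = MRApps.toString(id);
    System.out.println(jobId); // expected: job_1234567890000_0001
  }
}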
Project: hadoop    File: TestFileNameIndexUtils.java
@Test
public void testQueueNamePercentEncoding() throws IOException {
  JobIndexInfo info = new JobIndexInfo();
  JobID oldJobId = JobID.forName(JOB_ID);
  JobId jobId = TypeConverter.toYarn(oldJobId);
  info.setJobId(jobId);
  info.setSubmitTime(Long.parseLong(SUBMIT_TIME));
  info.setUser(USER_NAME);
  info.setJobName(JOB_NAME);
  info.setFinishTime(Long.parseLong(FINISH_TIME));
  info.setNumMaps(Integer.parseInt(NUM_MAPS));
  info.setNumReduces(Integer.parseInt(NUM_REDUCES));
  info.setJobStatus(JOB_STATUS);
  info.setQueueName(QUEUE_NAME_WITH_DELIMITER);
  info.setJobStartTime(Long.parseLong(JOB_START_TIME));

  String jobHistoryFile = FileNameIndexUtils.getDoneFileName(info);
  Assert.assertTrue("Queue name not encoded correctly into job history file",
      jobHistoryFile.contains(QUEUE_NAME_WITH_DELIMITER_ESCAPE));
}
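The snippet above leans on TypeConverter.toYarn to turn the classic org.apache.hadoop.mapreduce.JobID into the YARN-side JobId record. A hedged sketch of the round trip (the job-id string is an example value, and TypeConverter.fromYarn is assumed here for the reverse direction):

import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.TypeConverter;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;

public class JobIdRoundTrip {
  public static void main(String[] args) {
    JobID oldJobId = JobID.forName("job_1234567890000_0001"); // example id
    JobId jobId = TypeConverter.toYarn(oldJobId);   // classic API -> YARN record
    JobID roundTrip = TypeConverter.fromYarn(jobId); // YARN record -> classic API
    System.out.println(jobId);     // expected: job_1234567890000_0001
    System.out.println(roundTrip); // expected: job_1234567890000_0001
  }
}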
Project: hadoop    File: TestJobImpl.java
private static StubbedJob createStubbedJob(Configuration conf,
    Dispatcher dispatcher, int numSplits, AppContext appContext) {
  JobID jobID = JobID.forName("job_1234567890000_0001");
  JobId jobId = TypeConverter.toYarn(jobID);
  if (appContext == null) {
    appContext = mock(AppContext.class);
    when(appContext.hasSuccessfullyUnregistered()).thenReturn(true);
  }
  StubbedJob job = new StubbedJob(jobId,
      ApplicationAttemptId.newInstance(ApplicationId.newInstance(0, 0), 0),
      conf, dispatcher.getEventHandler(), true, "somebody", numSplits, appContext);
  dispatcher.register(JobEventType.class, job);
  EventHandler mockHandler = mock(EventHandler.class);
  dispatcher.register(TaskEventType.class, mockHandler);
  dispatcher.register(org.apache.hadoop.mapreduce.jobhistory.EventType.class,
      mockHandler);
  dispatcher.register(JobFinishEvent.Type.class, mockHandler);
  return job;
}
Project: hadoop    File: TestAMWebServicesJobs.java
@Test
public void testJobIdXML() throws Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);

    ClientResponse response = r.path("ws").path("v1").path("mapreduce")
        .path("jobs").path(jobId).accept(MediaType.APPLICATION_XML)
        .get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
    String xml = response.getEntity(String.class);
    DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
    DocumentBuilder db = dbf.newDocumentBuilder();
    InputSource is = new InputSource();
    is.setCharacterStream(new StringReader(xml));
    Document dom = db.parse(is);
    NodeList job = dom.getElementsByTagName("job");
    verifyAMJobXML(job, appContext);
  }
}
Project: hadoop    File: TestHsWebServicesTasks.java
@Test
public void testTasks() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    ClientResponse response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs").path(jobId).path("tasks")
        .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject tasks = json.getJSONObject("tasks");
    JSONArray arr = tasks.getJSONArray("task");
    assertEquals("incorrect number of elements", 2, arr.length());

    verifyHsTask(arr, jobsMap.get(id), null);
  }
}
Project: hadoop    File: TestAMWebServicesJobs.java
@Test
public void testJobIdSlash() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);

    ClientResponse response = r.path("ws").path("v1").path("mapreduce")
        .path("jobs").path(jobId + "/").accept(MediaType.APPLICATION_JSON)
        .get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject info = json.getJSONObject("job");
    verifyAMJob(info, jobsMap.get(id));
  }
}
Project: hadoop    File: TestHsWebServicesJobs.java
@Test
public void testJobCountersXML() throws Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);

    ClientResponse response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs").path(jobId).path("counters")
        .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
    String xml = response.getEntity(String.class);
    DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
    DocumentBuilder db = dbf.newDocumentBuilder();
    InputSource is = new InputSource();
    is.setCharacterStream(new StringReader(xml));
    Document dom = db.parse(is);
    NodeList info = dom.getElementsByTagName("jobCounters");
    verifyHsJobCountersXML(info, appContext.getJob(id));
  }
}
Project: hadoop    File: TestHsWebServicesJobs.java
@Test
public void testJobIdDefault() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);

    ClientResponse response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs").path(jobId).get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject info = json.getJSONObject("job");
    VerifyJobsUtils.verifyHsJob(info, appContext.getJob(id));
  }
}
Project: hadoop    File: TestHsWebServicesTasks.java
@Test
public void testTaskIdCountersDefault() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    for (Task task : jobsMap.get(id).getTasks().values()) {

      String tid = MRApps.toString(task.getID());
      ClientResponse response = r.path("ws").path("v1").path("history")
          .path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
          .path("counters").get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
      JSONObject json = response.getEntity(JSONObject.class);
      assertEquals("incorrect number of elements", 1, json.length());
      JSONObject info = json.getJSONObject("jobTaskCounters");
      verifyHsJobTaskCounters(info, task);
    }
  }
}
Project: hadoop    File: TestJobImpl.java
@Test
public void testTransitionsAtFailed() throws IOException {
  Configuration conf = new Configuration();
  AsyncDispatcher dispatcher = new AsyncDispatcher();
  dispatcher.init(conf);
  dispatcher.start();

  OutputCommitter committer = mock(OutputCommitter.class);
  doThrow(new IOException("forcefail"))
    .when(committer).setupJob(any(JobContext.class));
  CommitterEventHandler commitHandler =
      createCommitterEventHandler(dispatcher, committer);
  commitHandler.init(conf);
  commitHandler.start();

  AppContext mockContext = mock(AppContext.class);
  when(mockContext.hasSuccessfullyUnregistered()).thenReturn(false);
  JobImpl job = createStubbedJob(conf, dispatcher, 2, mockContext);
  JobId jobId = job.getID();
  job.handle(new JobEvent(jobId, JobEventType.JOB_INIT));
  assertJobState(job, JobStateInternal.INITED);
  job.handle(new JobStartEvent(jobId));
  assertJobState(job, JobStateInternal.FAILED);

  job.handle(new JobEvent(jobId, JobEventType.JOB_TASK_COMPLETED));
  assertJobState(job, JobStateInternal.FAILED);
  job.handle(new JobEvent(jobId, JobEventType.JOB_TASK_ATTEMPT_COMPLETED));
  assertJobState(job, JobStateInternal.FAILED);
  job.handle(new JobEvent(jobId, JobEventType.JOB_MAP_TASK_RESCHEDULED));
  assertJobState(job, JobStateInternal.FAILED);
  job.handle(new JobEvent(jobId, JobEventType.JOB_TASK_ATTEMPT_FETCH_FAILURE));
  assertJobState(job, JobStateInternal.FAILED);
  Assert.assertEquals(JobState.RUNNING, job.getState());
  when(mockContext.hasSuccessfullyUnregistered()).thenReturn(true);
  Assert.assertEquals(JobState.FAILED, job.getState());

  dispatcher.stop();
  commitHandler.stop();
}
Project: hadoop    File: MRApp.java
@SuppressWarnings("rawtypes")
public TestJob(JobId jobId, ApplicationAttemptId applicationAttemptId,
    Configuration conf, EventHandler eventHandler,
    TaskAttemptListener taskAttemptListener, Clock clock,
    OutputCommitter committer, boolean newApiCommitter,
    String user, AppContext appContext,
    JobStateInternal forcedState, String diagnostic) {
  super(jobId, getApplicationAttemptId(applicationId, getStartCount()),
      conf, eventHandler, taskAttemptListener,
      new JobTokenSecretManager(), new Credentials(), clock,
      getCompletedTaskFromPreviousRun(), metrics, committer,
      newApiCommitter, user, System.currentTimeMillis(), getAllAMInfos(),
      appContext, forcedState, diagnostic);

  // This "this leak" is okay because the retained pointer is in an
  //  instance variable.
  localStateMachine = localFactory.make(this);
}
Project: hadoop    File: TestAMWebServicesJobs.java
@Test
public void testJobCountersXML() throws Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);

    ClientResponse response = r.path("ws").path("v1").path("mapreduce")
        .path("jobs").path(jobId).path("counters")
        .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
    String xml = response.getEntity(String.class);
    DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
    DocumentBuilder db = dbf.newDocumentBuilder();
    InputSource is = new InputSource();
    is.setCharacterStream(new StringReader(xml));
    Document dom = db.parse(is);
    NodeList info = dom.getElementsByTagName("jobCounters");
    verifyAMJobCountersXML(info, jobsMap.get(id));
  }
}
Project: hadoop    File: MRClientService.java
@SuppressWarnings("unchecked")
@Override
public KillJobResponse killJob(KillJobRequest request) 
  throws IOException {
  JobId jobId = request.getJobId();
  UserGroupInformation callerUGI = UserGroupInformation.getCurrentUser();
  String message = "Kill job " + jobId + " received from " + callerUGI
      + " at " + Server.getRemoteAddress();
  LOG.info(message);
  verifyAndGetJob(jobId, JobACL.MODIFY_JOB, false);
  appContext.getEventHandler().handle(
      new JobDiagnosticsUpdateEvent(jobId, message));
  appContext.getEventHandler().handle(
      new JobEvent(jobId, JobEventType.JOB_KILL));
  KillJobResponse response = 
    recordFactory.newRecordInstance(KillJobResponse.class);
  return response;
}
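For callers of this protocol, the request record is built the same way the service builds its response above, via the record factory. A hedged sketch (KillJobRequest.setJobId is assumed to mirror the getJobId accessor used above):

import org.apache.hadoop.mapreduce.v2.api.protocolrecords.KillJobRequest;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;

public class KillJobRequests {
  // Build a KillJobRequest for a job id obtained elsewhere
  // (for example via TypeConverter.toYarn).
  public static KillJobRequest newKillRequest(JobId jobId) {
    KillJobRequest request = RecordFactoryProvider.getRecordFactory(null)
        .newRecordInstance(KillJobRequest.class);
    request.setJobId(jobId);
    return request;
  }
}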
Project: hadoop    File: TestHsWebServicesJobConf.java
@Test
public void testJobConfDefault() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);

    ClientResponse response = r.path("ws").path("v1").path("history").path("mapreduce")
        .path("jobs").path(jobId).path("conf").get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject info = json.getJSONObject("conf");
    verifyHsJobConf(info, jobsMap.get(id));
  }
}
Project: hadoop    File: TestHsWebServicesJobConf.java
@Test
public void testJobConfXML() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);

    ClientResponse response = r.path("ws").path("v1").path("history").path("mapreduce")
        .path("jobs").path(jobId).path("conf")
        .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
    String xml = response.getEntity(String.class);
    DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
    DocumentBuilder db = dbf.newDocumentBuilder();
    InputSource is = new InputSource();
    is.setCharacterStream(new StringReader(xml));
    Document dom = db.parse(is);
    NodeList info = dom.getElementsByTagName("conf");
    verifyHsJobConfXML(info, jobsMap.get(id));
  }
}
Project: hadoop    File: TestLocalContainerAllocator.java
private static AppContext createAppContext() {
  ApplicationId appId = ApplicationId.newInstance(1, 1);
  ApplicationAttemptId attemptId =
      ApplicationAttemptId.newInstance(appId, 1);
  Job job = mock(Job.class);
  @SuppressWarnings("rawtypes")
  EventHandler eventHandler = mock(EventHandler.class);
  AppContext ctx = mock(AppContext.class);
  when(ctx.getApplicationID()).thenReturn(appId);
  when(ctx.getApplicationAttemptId()).thenReturn(attemptId);
  when(ctx.getJob(isA(JobId.class))).thenReturn(job);
  when(ctx.getClusterInfo()).thenReturn(
    new ClusterInfo(Resource.newInstance(10240, 1, 0)));
  when(ctx.getEventHandler()).thenReturn(eventHandler);
  return ctx;
}
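Because the mock answers ctx.getJob(isA(JobId.class)) for any job id, code under test never needs a real job registry. A short usage sketch against the helper above (a fragment, not a standalone class):

  // Any JobId resolves to the same mock Job.
  AppContext ctx = createAppContext();
  JobId someJob = MRBuilderUtils.newJobId(ApplicationId.newInstance(1, 1), 1);
  Job job = ctx.getJob(someJob); // returns the Mockito mock created above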
Project: hadoop    File: TestAMWebServicesAttempts.java
@Test
public void testTaskAttempts() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    for (Task task : jobsMap.get(id).getTasks().values()) {

      String tid = MRApps.toString(task.getID());
      ClientResponse response = r.path("ws").path("v1").path("mapreduce")
          .path("jobs").path(jobId).path("tasks").path(tid).path("attempts")
          .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
      JSONObject json = response.getEntity(JSONObject.class);
      verifyAMTaskAttempts(json, task);
    }
  }
}
Project: hadoop    File: TestHsWebServicesAttempts.java
@Test
public void testTaskAttempts() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    for (Task task : jobsMap.get(id).getTasks().values()) {

      String tid = MRApps.toString(task.getID());
      ClientResponse response = r.path("ws").path("v1").path("history")
          .path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
          .path("attempts").accept(MediaType.APPLICATION_JSON)
          .get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
      JSONObject json = response.getEntity(JSONObject.class);
      verifyHsTaskAttempts(json, task);
    }
  }
}
Project: hadoop    File: TestHsWebServicesTasks.java
@Test
public void testTasksQueryReduce() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    String type = "r";
    ClientResponse response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs").path(jobId).path("tasks")
        .queryParam("type", type).accept(MediaType.APPLICATION_JSON)
        .get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject tasks = json.getJSONObject("tasks");
    JSONArray arr = tasks.getJSONArray("task");
    assertEquals("incorrect number of elements", 1, arr.length());
    verifyHsTask(arr, jobsMap.get(id), type);
  }
}
Project: hadoop    File: TestAMWebServicesTasks.java
@Test
public void testTasksSlash() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    ClientResponse response = r.path("ws").path("v1").path("mapreduce")
        .path("jobs").path(jobId).path("tasks/")
        .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject tasks = json.getJSONObject("tasks");
    JSONArray arr = tasks.getJSONArray("task");
    assertEquals("incorrect number of elements", 2, arr.length());

    verifyAMTask(arr, jobsMap.get(id), null);
  }
}
Project: hadoop    File: TestAMWebServicesTasks.java
@Test
public void testTasksXML() throws JSONException, Exception {

  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    ClientResponse response = r.path("ws").path("v1").path("mapreduce")
        .path("jobs").path(jobId).path("tasks")
        .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
    String xml = response.getEntity(String.class);
    DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
    DocumentBuilder db = dbf.newDocumentBuilder();
    InputSource is = new InputSource();
    is.setCharacterStream(new StringReader(xml));
    Document dom = db.parse(is);
    NodeList tasks = dom.getElementsByTagName("tasks");
    assertEquals("incorrect number of elements", 1, tasks.getLength());
    NodeList task = dom.getElementsByTagName("task");
    verifyAMTaskXML(task, jobsMap.get(id));
  }
}
Project: hadoop    File: TestAMWebServicesTasks.java
@Test
public void testTasksQueryMap() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    String type = "m";
    ClientResponse response = r.path("ws").path("v1").path("mapreduce")
        .path("jobs").path(jobId).path("tasks").queryParam("type", type)
        .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject tasks = json.getJSONObject("tasks");
    JSONArray arr = tasks.getJSONArray("task");
    assertEquals("incorrect number of elements", 1, arr.length());
    verifyAMTask(arr, jobsMap.get(id), type);
  }
}
Project: hadoop    File: TestHsWebServicesTasks.java
@Test
public void testTasksQueryMap() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    String type = "m";
    ClientResponse response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs").path(jobId).path("tasks")
        .queryParam("type", type).accept(MediaType.APPLICATION_JSON)
        .get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject tasks = json.getJSONObject("tasks");
    JSONArray arr = tasks.getJSONArray("task");
    assertEquals("incorrect number of elements", 1, arr.length());
    verifyHsTask(arr, jobsMap.get(id), type);
  }
}
Project: hadoop    File: TestAMWebServicesJobs.java
@Test
public void testJobAttemptsDefault() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);

    ClientResponse response = r.path("ws").path("v1")
        .path("mapreduce").path("jobs").path(jobId).path("jobattempts")
        .get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject info = json.getJSONObject("jobAttempts");
    verifyJobAttempts(info, jobsMap.get(id));
  }
}
Project: hadoop    File: TaskIdPBImpl.java
@Override
public synchronized void setJobId(JobId jobId) {
  maybeInitBuilder();
  if (jobId == null) {
    builder.clearJobId();
  }
  this.jobId = jobId;
}
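JobId itself is an abstract record; *PBImpl classes such as the one above back it with a protobuf builder. Code that needs a fresh instance typically goes through Records.newRecord and the setters, as in this minimal sketch (illustrative timestamp and ids only):

import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.util.Records;

public class NewJobIdRecord {
  public static void main(String[] args) {
    ApplicationId appId = ApplicationId.newInstance(1234567890000L, 1);
    JobId jobId = Records.newRecord(JobId.class); // instantiates the PBImpl
    jobId.setAppId(appId);
    jobId.setId(1);
    System.out.println(jobId); // expected: job_1234567890000_0001
  }
}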
Project: hadoop    File: MRApps.java
public static Path getStartJobCommitFile(Configuration conf, String user,
    JobId jobId) {
  Path startCommitFile = new Path(MRApps.getStagingAreaDir(conf, user),
      jobId.toString() + Path.SEPARATOR + "COMMIT_STARTED");
  return startCommitFile;
}
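A small usage sketch of the helper above; the user name and the default Configuration are placeholders. The marker file sits directly under the job's directory in the staging area:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils;

public class StartCommitFileDemo {
  public static void main(String[] args) {
    Configuration conf = new Configuration(); // placeholder defaults
    JobId jobId = MRBuilderUtils.newJobId(1234567890000L, 1, 1);
    Path f = MRApps.getStartJobCommitFile(conf, "alice", jobId);
    System.out.println(f); // .../job_1234567890000_0001/COMMIT_STARTED
  }
}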
Project: hadoop    File: TestJobInfo.java
@Test(timeout = 10000)
public void testAverageMergeTime() throws IOException {
  String historyFileName =
      "job_1329348432655_0001-1329348443227-user-Sleep+job-1329348468601-10-1-SUCCEEDED-default.jhist";
  String confFileName =
      "job_1329348432655_0001_conf.xml";
  Configuration conf = new Configuration();
  JobACLsManager jobAclsMgr = new JobACLsManager(conf);
  Path fullHistoryPath =
      new Path(TestJobHistoryEntities.class.getClassLoader()
          .getResource(historyFileName)
          .getFile());
  Path fullConfPath =
      new Path(TestJobHistoryEntities.class.getClassLoader()
          .getResource(confFileName)
          .getFile());

  HistoryFileInfo info = mock(HistoryFileInfo.class);
  when(info.getConfFile()).thenReturn(fullConfPath);

  JobId jobId = MRBuilderUtils.newJobId(1329348432655L, 1, 1);
  CompletedJob completedJob =
      new CompletedJob(conf, jobId, fullHistoryPath, true, "user",
          info, jobAclsMgr);
  JobInfo jobInfo = new JobInfo(completedJob);
  // There are 2 tasks with merge time of 45 and 55 respectively. So average
  // merge time should be 50.
  Assert.assertEquals(50L, jobInfo.getAvgMergeTime().longValue());
}
Project: hadoop    File: MRBuilderUtils.java
public static TaskId newTaskId(JobId jobId, int id, TaskType taskType) {
  TaskId taskId = Records.newRecord(TaskId.class);
  taskId.setJobId(jobId);
  taskId.setId(id);
  taskId.setTaskType(taskType);
  return taskId;
}
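The builder methods compose into the full id hierarchy: an ApplicationId anchors a JobId, which anchors a TaskId, which anchors a TaskAttemptId. A minimal sketch of the whole chain (illustrative values):

import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils;
import org.apache.hadoop.yarn.api.records.ApplicationId;

public class IdHierarchyDemo {
  public static void main(String[] args) {
    ApplicationId appId = ApplicationId.newInstance(1234567890000L, 1);
    JobId jobId = MRBuilderUtils.newJobId(appId, 1);
    TaskId taskId = MRBuilderUtils.newTaskId(jobId, 0, TaskType.MAP);
    TaskAttemptId attemptId = MRBuilderUtils.newTaskAttemptId(taskId, 0);
    System.out.println(attemptId); // expected: attempt_1234567890000_0001_m_000000_0
  }
}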
Project: hadoop    File: TestRMContainerAllocator.java
private ContainerFailedEvent createFailEvent(JobId jobId, int taskAttemptId,
    String host, boolean reduce) {
  TaskId taskId;
  if (reduce) {
    taskId = MRBuilderUtils.newTaskId(jobId, 0, TaskType.REDUCE);
  } else {
    taskId = MRBuilderUtils.newTaskId(jobId, 0, TaskType.MAP);
  }
  TaskAttemptId attemptId = MRBuilderUtils.newTaskAttemptId(taskId,
      taskAttemptId);
  return new ContainerFailedEvent(attemptId, host);    
}
Project: hadoop    File: PartialJob.java
public PartialJob(JobIndexInfo jobIndexInfo, JobId jobId) {
  this.jobIndexInfo = jobIndexInfo;
  this.jobId = jobId;
  jobReport = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(JobReport.class);
  jobReport.setSubmitTime(jobIndexInfo.getSubmitTime());
  jobReport.setStartTime(jobIndexInfo.getJobStartTime());
  jobReport.setFinishTime(jobIndexInfo.getFinishTime());
  jobReport.setJobState(getState());
}
Project: hadoop    File: TestUnnecessaryBlockingOnHistoryFileInfo.java
/**
 * Wait until scanning of the intermediate directory finishes and load
 * of the given job is started.
 */
public void waitUntilIntermediateDirIsScanned(JobId jobId)
    throws InterruptedException {
  if(scanningDoneSignals.containsKey(jobId)) {
    scanningDoneSignals.get(jobId).await();
  }
}
Project: hadoop    File: TestTaskImpl.java
@Before 
@SuppressWarnings("unchecked")
public void setup() {
  dispatcher = new InlineDispatcher();

  ++startCount;

  conf = new JobConf();
  taskAttemptListener = mock(TaskAttemptListener.class);
  jobToken = (Token<JobTokenIdentifier>) mock(Token.class);
  remoteJobConfFile = mock(Path.class);
  credentials = null;
  clock = new SystemClock();
  metrics = mock(MRAppMetrics.class);  
  dataLocations = new String[1];

  appId = ApplicationId.newInstance(System.currentTimeMillis(), 1);

  jobId = Records.newRecord(JobId.class);
  jobId.setId(1);
  jobId.setAppId(appId);
  appContext = mock(AppContext.class);

  taskSplitMetaInfo = mock(TaskSplitMetaInfo.class);
  when(taskSplitMetaInfo.getLocations()).thenReturn(dataLocations); 

  taskAttempts = new ArrayList<MockTaskAttemptImpl>();    
}
Project: hadoop    File: JobReportPBImpl.java
@Override
public synchronized void setJobId(JobId jobId) {
  maybeInitBuilder();
  if (jobId == null) {
    builder.clearJobId();
  }
  this.jobId = jobId;
}
Project: hadoop    File: TestAMWebServicesAttempts.java
@Test
public void testTaskAttemptIdXML() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    for (Task task : jobsMap.get(id).getTasks().values()) {

      String tid = MRApps.toString(task.getID());
      for (TaskAttempt att : task.getAttempts().values()) {
        TaskAttemptId attemptid = att.getID();
        String attid = MRApps.toString(attemptid);

        ClientResponse response = r.path("ws").path("v1").path("mapreduce")
            .path("jobs").path(jobId).path("tasks").path(tid)
            .path("attempts").path(attid).accept(MediaType.APPLICATION_XML)
            .get(ClientResponse.class);

        assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
        String xml = response.getEntity(String.class);
        DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
        DocumentBuilder db = dbf.newDocumentBuilder();
        InputSource is = new InputSource();
        is.setCharacterStream(new StringReader(xml));
        Document dom = db.parse(is);
        NodeList nodes = dom.getElementsByTagName("taskAttempt");
        for (int i = 0; i < nodes.getLength(); i++) {
          Element element = (Element) nodes.item(i);
          verifyAMTaskAttemptXML(element, att, task.getType());
        }
      }
    }
  }
}
Project: hadoop    File: TestUnnecessaryBlockingOnHistoryFileInfo.java
/**
 * Create, initialize and start an instance of HistoryFileManager.
 * @param config the configuration to initialize the HistoryFileManager
 *               instance.
 * @param jobIds the set of jobs expected to be loaded by HistoryFileManager.
 */
private HistoryFileManagerUnderContention createHistoryFileManager(
    Configuration config, JobId... jobIds) {
  HistoryFileManagerUnderContention historyFileManager =
      new HistoryFileManagerUnderContention(jobIds);
  historyFileManager.init(config);
  historyFileManager.start();
  return historyFileManager;
}
Project: hadoop    File: TestHsWebServicesJobsQuery.java
@Test
public void testJobsQueryFinishTimeBeginEnd() throws JSONException, Exception {
  WebResource r = resource();

  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  int size = jobsMap.size();
  // figure out the mid finish time - we expect at least 3 jobs
  ArrayList<Long> finishTime = new ArrayList<Long>(size);
  for (Map.Entry<JobId, Job> entry : jobsMap.entrySet()) {
    finishTime.add(entry.getValue().getReport().getFinishTime());
  }
  Collections.sort(finishTime);

  assertTrue("Error we must have atleast 3 jobs", size >= 3);
  long midFinishTime = finishTime.get(size - 2);

  ClientResponse response = r.path("ws").path("v1").path("history")
      .path("mapreduce").path("jobs")
      .queryParam("finishedTimeBegin", String.valueOf(40000))
      .queryParam("finishedTimeEnd", String.valueOf(midFinishTime))
      .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
  assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
  JSONObject json = response.getEntity(JSONObject.class);
  assertEquals("incorrect number of elements", 1, json.length());
  JSONObject jobs = json.getJSONObject("jobs");
  JSONArray arr = jobs.getJSONArray("job");
  assertEquals("incorrect number of elements", size - 1, arr.length());
}
Project: hadoop    File: TestStagingCleanup.java
@SuppressWarnings("resource")
private void testDeletionofStagingOnUnregistrationFailure(
    int maxAttempts, boolean shouldHaveDeleted) throws IOException {
  conf.set(MRJobConfig.MAPREDUCE_JOB_DIR, stagingJobDir);
  fs = mock(FileSystem.class);
  when(fs.delete(any(Path.class), anyBoolean())).thenReturn(true);
  //Staging Dir exists
  String user = UserGroupInformation.getCurrentUser().getShortUserName();
  Path stagingDir = MRApps.getStagingAreaDir(conf, user);
  when(fs.exists(stagingDir)).thenReturn(true);
  ApplicationId appId = ApplicationId.newInstance(0, 1);
  ApplicationAttemptId attemptId = ApplicationAttemptId.newInstance(appId, 1);
  JobId jobid = recordFactory.newRecordInstance(JobId.class);
  jobid.setAppId(appId);
  TestMRApp appMaster = new TestMRApp(attemptId, null,
      JobStateInternal.RUNNING, maxAttempts);
  appMaster.crushUnregistration = true;
  appMaster.init(conf);
  appMaster.start();
  appMaster.shutDownJob();
  ((RunningAppContext) appMaster.getContext()).resetIsLastAMRetry();
  if (shouldHaveDeleted) {
    Assert.assertEquals(Boolean.TRUE, appMaster.isLastAMRetry());
    verify(fs).delete(stagingJobPath, true);
  } else {
    Assert.assertEquals(Boolean.FALSE, appMaster.isLastAMRetry());
    verify(fs, never()).delete(stagingJobPath, true);
  }
}
Project: hadoop    File: TestContainerLauncherImpl.java
public static TaskAttemptId makeTaskAttemptId(long ts, int appId, int taskId, 
    TaskType taskType, int id) {
  ApplicationId aID = ApplicationId.newInstance(ts, appId);
  JobId jID = MRBuilderUtils.newJobId(aID, id);
  TaskId tID = MRBuilderUtils.newTaskId(jID, taskId, taskType);
  return MRBuilderUtils.newTaskAttemptId(tID, id);
}
Project: hadoop    File: ReduceTaskImpl.java
public ReduceTaskImpl(JobId jobId, int partition,
    EventHandler eventHandler, Path jobFile, JobConf conf,
    int numMapTasks, TaskAttemptListener taskAttemptListener,
    Token<JobTokenIdentifier> jobToken,
    Credentials credentials, Clock clock,
    int appAttemptId, MRAppMetrics metrics, AppContext appContext) {
  super(jobId, TaskType.REDUCE, partition, eventHandler, jobFile, conf,
      taskAttemptListener, jobToken, credentials, clock,
      appAttemptId, metrics, appContext);
  this.numMapTasks = numMapTasks;
}
Project: hadoop    File: TestAMWebServicesAttempts.java
@Test
public void testTaskAttemptsXML() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    for (Task task : jobsMap.get(id).getTasks().values()) {

      String tid = MRApps.toString(task.getID());
      ClientResponse response = r.path("ws").path("v1").path("mapreduce")
          .path("jobs").path(jobId).path("tasks").path(tid).path("attempts")
          .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);

      assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
      String xml = response.getEntity(String.class);
      DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
      DocumentBuilder db = dbf.newDocumentBuilder();
      InputSource is = new InputSource();
      is.setCharacterStream(new StringReader(xml));
      Document dom = db.parse(is);
      NodeList attempts = dom.getElementsByTagName("taskAttempts");
      assertEquals("incorrect number of elements", 1, attempts.getLength());

      NodeList nodes = dom.getElementsByTagName("taskAttempt");
      verifyAMTaskAttemptsXML(nodes, task);
    }
  }
}