Java Class org.apache.hadoop.mapreduce.test.system.TTTaskInfo Example Source Code
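
All of the examples below exercise TTTaskInfo the same way: the test resolves the TaskTrackers that run a task through the JobTracker proxy, looks up the corresponding TTClient, and then polls the TaskTracker-side TTTaskInfo until the task reports RUNNING. The following is a minimal editorial sketch of just that polling step. It reuses only calls that appear in the examples; the MRCluster and JTProtocol types, the class name TTTaskInfoPollSketch, and the helper name waitForTasksRunning are assumptions inferred from the Herriot system-test framework rather than code from any of the listed projects.

import org.apache.hadoop.mapred.TaskStatus;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.test.system.JTProtocol;
import org.apache.hadoop.mapreduce.test.system.MRCluster;
import org.apache.hadoop.mapreduce.test.system.TTClient;
import org.apache.hadoop.mapreduce.test.system.TTInfo;
import org.apache.hadoop.mapreduce.test.system.TTTaskInfo;
import org.apache.hadoop.mapreduce.test.system.TaskInfo;

public class TTTaskInfoPollSketch {

  // Hypothetical helper: blocks until every non-setup/cleanup task of the
  // given job is reported as RUNNING by its TaskTracker, mirroring the
  // polling loop used in the TestFileOwner examples below.
  static void waitForTasksRunning(MRCluster cluster, JobID id)
      throws Exception {
    JTProtocol wovenClient = cluster.getJTClient().getProxy();
    for (TaskInfo tInfo : wovenClient.getTaskInfo(id)) {
      if (tInfo.isSetupOrCleanup()) {
        continue; // the examples skip setup/cleanup tasks as well
      }
      for (String taskTracker : tInfo.getTaskTrackers()) {
        // Resolve the TaskTracker that hosts this attempt and its test client.
        TTInfo ttInfo = wovenClient.getTTInfo(taskTracker);
        TTClient ttCli = cluster.getTTClient(ttInfo.getStatus().getHost());
        // Poll the TaskTracker-side view of the task until it is RUNNING.
        TTTaskInfo ttTaskInfo = ttCli.getProxy().getTask(tInfo.getTaskID());
        while (ttTaskInfo.getTaskStatus().getRunState()
               != TaskStatus.State.RUNNING) {
          Thread.sleep(100);
          ttTaskInfo = ttCli.getProxy().getTask(tInfo.getTaskID());
        }
      }
    }
  }
}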

Project: hadoop-2.6.0-cdh5.4.3    File: TestFileOwner.java
@Test
public void testFilePermission() throws Exception {
  wovenClient = cluster.getJTClient().getProxy();
  Configuration conf = new Configuration(cluster.getConf());
  FinishTaskControlAction.configureControlActionForJob(conf);
  SleepJob job = new SleepJob();
  job.setConf(conf);
  conf = job.setupJobConf(1, 0, 100, 100, 100, 100);
  JobConf jconf = new JobConf(conf);
  RunningJob rJob = cluster.getJTClient().getClient().submitJob(jconf);
  taskController = conf.get("mapred.task.tracker.task-controller");
  // Get the job info so we can get the env variables from the daemon.
  // Now wait for the task to be in the running state; only then will the
  // directories be created.
  JobInfo info = wovenClient.getJobInfo(rJob.getID());
  Assert.assertNotNull("JobInfo is null",info);
  JobID id = rJob.getID();
  while (info.runningMaps() != 1) {
    Thread.sleep(1000);
    info = wovenClient.getJobInfo(id);
  }
  TaskInfo[] myTaskInfos = wovenClient.getTaskInfo(id);
  for (TaskInfo tInfo : myTaskInfos) {
    if (!tInfo.isSetupOrCleanup()) {
      String[] taskTrackers = tInfo.getTaskTrackers();
      for (String taskTracker : taskTrackers) {
        TTInfo ttInfo = wovenClient.getTTInfo(taskTracker);
        TTClient ttCli = cluster.getTTClient(ttInfo.getStatus().getHost());
        Assert.assertNotNull("TTClient instance is null",ttCli);
        TTTaskInfo ttTaskInfo = ttCli.getProxy().getTask(tInfo.getTaskID());
        Assert.assertNotNull("TTTaskInfo is null",ttTaskInfo);
        while (ttTaskInfo.getTaskStatus().getRunState() !=
               TaskStatus.State.RUNNING) {
          Thread.sleep(100);
          ttTaskInfo = ttCli.getProxy().getTask(tInfo.getTaskID());
        }
        testPermissionWithTaskController(ttCli, conf, info);
        FinishTaskControlAction action = new FinishTaskControlAction(TaskID
            .downgrade(tInfo.getTaskID()));
        for (TTClient cli : cluster.getTTClients()) {
          cli.getProxy().sendAction(action);
        }
      }
    }
  }
  JobInfo jInfo = wovenClient.getJobInfo(id);
  jInfo = cluster.getJTClient().getProxy().getJobInfo(id);
  while (!jInfo.getStatus().isJobComplete()) {
    Thread.sleep(100);
    jInfo = cluster.getJTClient().getProxy().getJobInfo(id);
  }
}
Project: hadoop-on-lustre    File: TestFileOwner.java
@Test
public void testFilePermission() throws Exception {
  wovenClient = cluster.getJTClient().getProxy();
  Configuration conf = new Configuration(cluster.getConf());
  FinishTaskControlAction.configureControlActionForJob(conf);
  SleepJob job = new SleepJob();
  job.setConf(conf);
  conf = job.setupJobConf(1, 0, 100, 100, 100, 100);
  JobConf jconf = new JobConf(conf);
  RunningJob rJob = cluster.getJTClient().getClient().submitJob(jconf);
  taskController = conf.get("mapred.task.tracker.task-controller");
  // Get the job info so we can get the env variables from the daemon.
  // Now wait for the task to be in the running state; only then will the
  // directories be created.
  JobInfo info = wovenClient.getJobInfo(rJob.getID());
  Assert.assertNotNull("JobInfo is null",info);
  JobID id = rJob.getID();
  while (info.runningMaps() != 1) {
    Thread.sleep(1000);
    info = wovenClient.getJobInfo(id);
  }
  TaskInfo[] myTaskInfos = wovenClient.getTaskInfo(id);
  for (TaskInfo tInfo : myTaskInfos) {
    if (!tInfo.isSetupOrCleanup()) {
      String[] taskTrackers = tInfo.getTaskTrackers();
      for (String taskTracker : taskTrackers) {
        TTInfo ttInfo = wovenClient.getTTInfo(taskTracker);
        TTClient ttCli = cluster.getTTClient(ttInfo.getStatus().getHost());
        Assert.assertNotNull("TTClient instance is null",ttCli);
        TTTaskInfo ttTaskInfo = ttCli.getProxy().getTask(tInfo.getTaskID());
        Assert.assertNotNull("TTTaskInfo is null",ttTaskInfo);
        while (ttTaskInfo.getTaskStatus().getRunState() !=
               TaskStatus.State.RUNNING) {
          Thread.sleep(100);
          ttTaskInfo = ttCli.getProxy().getTask(tInfo.getTaskID());
        }
        testPermissionWithTaskController(ttCli, conf, info);
        FinishTaskControlAction action = new FinishTaskControlAction(TaskID
            .downgrade(tInfo.getTaskID()));
        for (TTClient cli : cluster.getTTClients()) {
          cli.getProxy().sendAction(action);
        }
      }
    }
  }
  JobInfo jInfo = wovenClient.getJobInfo(id);
  jInfo = cluster.getJTClient().getProxy().getJobInfo(id);
  while (!jInfo.getStatus().isJobComplete()) {
    Thread.sleep(100);
    jInfo = cluster.getJTClient().getProxy().getJobInfo(id);
  }
}
Project: hanoi-hadoop-2.0.0-cdh    File: TestFileOwner.java
@Test
public void testFilePermission() throws Exception {
  wovenClient = cluster.getJTClient().getProxy();
  Configuration conf = new Configuration(cluster.getConf());
  FinishTaskControlAction.configureControlActionForJob(conf);
  SleepJob job = new SleepJob();
  job.setConf(conf);
  conf = job.setupJobConf(1, 0, 100, 100, 100, 100);
  JobConf jconf = new JobConf(conf);
  RunningJob rJob = cluster.getJTClient().getClient().submitJob(jconf);
  taskController = conf.get("mapred.task.tracker.task-controller");
  // Get the job info so we can get the env variables from the daemon.
  // Now wait for the task to be in the running state; only then will the
  // directories be created.
  JobInfo info = wovenClient.getJobInfo(rJob.getID());
  Assert.assertNotNull("JobInfo is null",info);
  JobID id = rJob.getID();
  while (info.runningMaps() != 1) {
    Thread.sleep(1000);
    info = wovenClient.getJobInfo(id);
  }
  TaskInfo[] myTaskInfos = wovenClient.getTaskInfo(id);
  for (TaskInfo tInfo : myTaskInfos) {
    if (!tInfo.isSetupOrCleanup()) {
      String[] taskTrackers = tInfo.getTaskTrackers();
      for (String taskTracker : taskTrackers) {
        TTInfo ttInfo = wovenClient.getTTInfo(taskTracker);
        TTClient ttCli = cluster.getTTClient(ttInfo.getStatus().getHost());
        Assert.assertNotNull("TTClient instance is null",ttCli);
        TTTaskInfo ttTaskInfo = ttCli.getProxy().getTask(tInfo.getTaskID());
        Assert.assertNotNull("TTTaskInfo is null",ttTaskInfo);
        while (ttTaskInfo.getTaskStatus().getRunState() !=
               TaskStatus.State.RUNNING) {
          Thread.sleep(100);
          ttTaskInfo = ttCli.getProxy().getTask(tInfo.getTaskID());
        }
        testPermissionWithTaskController(ttCli, conf, info);
        FinishTaskControlAction action = new FinishTaskControlAction(TaskID
            .downgrade(tInfo.getTaskID()));
        for (TTClient cli : cluster.getTTClients()) {
          cli.getProxy().sendAction(action);
        }
      }
    }
  }
  JobInfo jInfo = wovenClient.getJobInfo(id);
  jInfo = cluster.getJTClient().getProxy().getJobInfo(id);
  while (!jInfo.getStatus().isJobComplete()) {
    Thread.sleep(100);
    jInfo = cluster.getJTClient().getProxy().getJobInfo(id);
  }
}
Project: mapreduce-fork    File: TestFileOwner.java
@Test
public void testFilePermission() throws Exception {
  wovenClient = cluster.getJTClient().getProxy();
  Configuration conf = new Configuration(cluster.getConf());
  FinishTaskControlAction.configureControlActionForJob(conf);
  SleepJob job = new SleepJob();
  job.setConf(conf);
  Job slpJob = job.createJob(1, 0, 100, 100, 100, 100);
  JobConf jconf = new JobConf(conf);
  slpJob.submit();
  RunningJob rJob =
      cluster.getJTClient().getClient().getJob(
          org.apache.hadoop.mapred.JobID.downgrade(slpJob.getJobID()));
  taskController = conf.get(TTConfig.TT_TASK_CONTROLLER);
  // Get the job info so we can get the env variables from the daemon.
  // Now wait for the task to be in the running state; only then will the
  // directories be created.
  JobInfo info = wovenClient.getJobInfo(rJob.getID());
  Assert.assertNotNull("JobInfo is null", info);
  JobID id = rJob.getID();
  while (info.runningMaps() != 1) {
    Thread.sleep(1000);
    info = wovenClient.getJobInfo(id);
  }
  TaskInfo[] myTaskInfos = wovenClient.getTaskInfo(id);
  for (TaskInfo tInfo : myTaskInfos) {
    if (!tInfo.isSetupOrCleanup()) {
      String[] taskTrackers = tInfo.getTaskTrackers();
      for (String taskTracker : taskTrackers) {
        TTInfo ttInfo = wovenClient.getTTInfo(taskTracker);
        TTClient ttCli = cluster.getTTClient(ttInfo.getStatus().getHost());
        Assert.assertNotNull("TTClient instance is null", ttCli);
        TTTaskInfo ttTaskInfo = ttCli.getProxy().getTask(tInfo.getTaskID());
        Assert.assertNotNull("TTTaskInfo is null", ttTaskInfo);
        while (ttTaskInfo.getTaskStatus().getRunState() != TaskStatus.State.RUNNING) {
          Thread.sleep(100);
          ttTaskInfo = ttCli.getProxy().getTask(tInfo.getTaskID());
        }
        testPermissionWithTaskController(ttCli, conf, info);
        FinishTaskControlAction action =
            new FinishTaskControlAction(TaskID.downgrade(tInfo.getTaskID()));
        for (TTClient cli : cluster.getTTClients()) {
          cli.getProxy().sendAction(action);
        }
      }
    }
  }
  JobInfo jInfo = wovenClient.getJobInfo(id);
  jInfo = cluster.getJTClient().getProxy().getJobInfo(id);
  while (!jInfo.getStatus().isJobComplete()) {
    Thread.sleep(100);
    jInfo = cluster.getJTClient().getProxy().getJobInfo(id);
  }
}
Project: hortonworks-extension    File: TestFileOwner.java
@Test
public void testFilePermission() throws Exception {
  wovenClient = cluster.getJTClient().getProxy();
  Configuration conf = new Configuration(cluster.getConf());
  FinishTaskControlAction.configureControlActionForJob(conf);
  SleepJob job = new SleepJob();
  job.setConf(conf);
  conf = job.setupJobConf(1, 0, 100, 100, 100, 100);
  JobConf jconf = new JobConf(conf);
  RunningJob rJob = cluster.getJTClient().getClient().submitJob(jconf);
  taskController = conf.get("mapred.task.tracker.task-controller");
  // Get the job info so we can get the env variables from the daemon.
  // Now wait for the task to be in the running state; only then will the
  // directories be created.
  JobInfo info = wovenClient.getJobInfo(rJob.getID());
  Assert.assertNotNull("JobInfo is null",info);
  JobID id = rJob.getID();
  while (info.runningMaps() != 1) {
    Thread.sleep(1000);
    info = wovenClient.getJobInfo(id);
  }
  TaskInfo[] myTaskInfos = wovenClient.getTaskInfo(id);
  for (TaskInfo tInfo : myTaskInfos) {
    if (!tInfo.isSetupOrCleanup()) {
      String[] taskTrackers = tInfo.getTaskTrackers();
      for (String taskTracker : taskTrackers) {
        TTInfo ttInfo = wovenClient.getTTInfo(taskTracker);
        TTClient ttCli = cluster.getTTClient(ttInfo.getStatus().getHost());
        Assert.assertNotNull("TTClient instance is null",ttCli);
        TTTaskInfo ttTaskInfo = ttCli.getProxy().getTask(tInfo.getTaskID());
        Assert.assertNotNull("TTTaskInfo is null",ttTaskInfo);
        while (ttTaskInfo.getTaskStatus().getRunState() !=
               TaskStatus.State.RUNNING) {
          Thread.sleep(100);
          ttTaskInfo = ttCli.getProxy().getTask(tInfo.getTaskID());
        }
        testPermissionWithTaskController(ttCli, conf, info);
        FinishTaskControlAction action = new FinishTaskControlAction(TaskID
            .downgrade(tInfo.getTaskID()));
        for (TTClient cli : cluster.getTTClients()) {
          cli.getProxy().sendAction(action);
        }
      }
    }
  }
  JobInfo jInfo = wovenClient.getJobInfo(id);
  jInfo = cluster.getJTClient().getProxy().getJobInfo(id);
  while (!jInfo.getStatus().isJobComplete()) {
    Thread.sleep(100);
    jInfo = cluster.getJTClient().getProxy().getJobInfo(id);
  }
}