/**
 * Renders the aggregated-logs page with a valid container/node/owner but
 * without log aggregation enabled, and verifies that the "aggregation is
 * not enabled" fallback message (pointing at the nodemanager) is written.
 */
@Test
public void testLogsView2() throws IOException {
  LOG.info("HsLogsPage with data");
  MockAppContext appContext = new MockAppContext(0, 1, 1, 1);
  Map<String, String> pageParams = new HashMap<>();
  pageParams.put(CONTAINER_ID,
      MRApp.newContainerId(1, 1, 333, 1).toString());
  pageParams.put(NM_NODENAME,
      NodeId.newInstance(MockJobs.NM_HOST, MockJobs.NM_PORT).toString());
  pageParams.put(ENTITY_STRING, "container_10_0001_01_000001");
  pageParams.put(APP_OWNER, "owner");
  Injector injector = WebAppTests.testPage(AggregatedLogsPage.class,
      AppContext.class, appContext, pageParams);
  PrintWriter writer = WebAppTests.getPrintWriter(injector);
  // The page should tell the user to fall back to the nodemanager's logs.
  verify(writer).write(
      "Aggregation is not enabled. Try the nodemanager at "
          + MockJobs.NM_HOST + ":" + MockJobs.NM_PORT);
}
/**
 * Renders the aggregated-logs page with non-numeric "start" and "end"
 * query parameters and verifies that a validation message is written
 * for each bad value.
 */
@Test
public void testLogsViewBadStartEnd() throws IOException {
  LOG.info("HsLogsPage with bad start/end params");
  MockAppContext appContext = new MockAppContext(0, 1, 1, 1);
  Map<String, String> pageParams = new HashMap<>();
  // Deliberately unparsable byte offsets.
  pageParams.put("start", "foo");
  pageParams.put("end", "bar");
  pageParams.put(CONTAINER_ID,
      MRApp.newContainerId(1, 1, 333, 1).toString());
  pageParams.put(NM_NODENAME,
      NodeId.newInstance(MockJobs.NM_HOST, MockJobs.NM_PORT).toString());
  pageParams.put(ENTITY_STRING, "container_10_0001_01_000001");
  pageParams.put(APP_OWNER, "owner");
  Injector injector = WebAppTests.testPage(AggregatedLogsPage.class,
      AppContext.class, appContext, pageParams);
  PrintWriter writer = WebAppTests.getPrintWriter(injector);
  verify(writer).write("Invalid log start value: foo");
  verify(writer).write("Invalid log end value: bar");
}
@Test public void testCountersOverRawCounters() { // Create basic class TaskReport report = Records.newRecord(TaskReport.class); org.apache.hadoop.mapreduce.Counters rCounters = MockJobs.newCounters(); Counters altCounters = TypeConverter.toYarn(rCounters); // Set raw counters report.setRawCounters(rCounters); // Set real counters report.setCounters(altCounters); // Verify real counters has priority over raw Counters counters = report.getCounters(); assertNotEquals(null, counters); assertNotEquals(rCounters, altCounters); assertEquals(counters, altCounters); }
@Test public void testSingleCounterView() { AppContext appContext = new MockAppContext(0, 1, 1, 1); Job job = appContext.getAllJobs().values().iterator().next(); // add a failed task to the job without any counters Task failedTask = MockJobs.newTask(job.getID(), 2, 1, true); Map<TaskId,Task> tasks = job.getTasks(); tasks.put(failedTask.getID(), failedTask); Map<String, String> params = getJobParams(appContext); params.put(AMParams.COUNTER_GROUP, "org.apache.hadoop.mapreduce.FileSystemCounter"); params.put(AMParams.COUNTER_NAME, "HDFS_WRITE_OPS"); WebAppTests.testPage(SingleCounterPage.class, AppContext.class, appContext, params); }
/**
 * Renders the aggregated-logs page for a single log type with byte-range
 * limits while aggregation is enabled but no logs exist, and verifies the
 * "logs not available" message is written.
 */
@Test
public void testLogsViewSingle() throws IOException {
  LOG.info("HsLogsPage with params for single log and data limits");
  MockAppContext appContext = new MockAppContext(0, 1, 1, 1);
  Map<String, String> pageParams = new HashMap<>();

  // Aggregation must be on for the page to look for aggregated logs.
  final Configuration conf = new YarnConfiguration();
  conf.setBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED, true);

  // Negative offsets request the tail of the log.
  pageParams.put("start", "-2048");
  pageParams.put("end", "-1024");
  pageParams.put(CONTAINER_LOG_TYPE, "syslog");
  pageParams.put(CONTAINER_ID,
      MRApp.newContainerId(1, 1, 333, 1).toString());
  pageParams.put(NM_NODENAME,
      NodeId.newInstance(MockJobs.NM_HOST, MockJobs.NM_PORT).toString());
  pageParams.put(ENTITY_STRING, "container_10_0001_01_000001");
  pageParams.put(APP_OWNER, "owner");

  Injector injector = WebAppTests.testPage(AggregatedLogsPage.class,
      AppContext.class, appContext, pageParams,
      new AbstractModule() {
        @Override
        protected void configure() {
          bind(Configuration.class).toInstance(conf);
        }
      });
  PrintWriter writer = WebAppTests.getPrintWriter(injector);
  verify(writer).write(
      "Logs not available for container_10_0001_01_000001."
          + " Aggregation may not be complete, "
          + "Check back later or try the nodemanager at "
          + MockJobs.NM_HOST + ":" + MockJobs.NM_PORT);
}
/**
 * Verifies that raw (mapreduce-layer) counters set on a
 * {@code TaskAttemptReport} are converted to non-null YARN counters
 * by {@code getCounters}.
 */
@Test
public void testSetRawCounters() {
  TaskAttemptReport attemptReport =
      Records.newRecord(TaskAttemptReport.class);
  org.apache.hadoop.mapreduce.Counters rawCounters = MockJobs.newCounters();
  attemptReport.setRawCounters(rawCounters);
  // getCounters should lazily convert the raw counters.
  Counters converted = attemptReport.getCounters();
  assertNotEquals(null, converted);
}
/**
 * Verifies that {@code getProto} implicitly converts raw counters set on a
 * {@code TaskAttemptReportPBImpl} into counters on the built proto.
 */
@Test
public void testBuildImplicitRawCounters() {
  TaskAttemptReportPBImpl report = new TaskAttemptReportPBImpl();
  org.apache.hadoop.mapreduce.Counters rCounters = MockJobs.newCounters();
  // Set raw counters only; the conversion must happen inside getProto.
  report.setRawCounters(rCounters);
  // Building the proto should implicitly convert/set real counters.
  MRProtos.TaskAttemptReportProto protoVal = report.getProto();
  assertTrue(protoVal.hasCounters());
}
/**
 * Verifies that counters set via {@code setCounters} take priority over
 * previously set raw counters on a {@code TaskAttemptReport}.
 */
@Test
public void testCountersOverRawCounters() {
  TaskAttemptReport attemptReport =
      Records.newRecord(TaskAttemptReport.class);
  org.apache.hadoop.mapreduce.Counters rawCounters = MockJobs.newCounters();
  Counters yarnCounters = TypeConverter.toYarn(rawCounters);

  // Raw counters first, then the YARN counters that should take priority.
  attemptReport.setRawCounters(rawCounters);
  attemptReport.setCounters(yarnCounters);

  Counters result = attemptReport.getCounters();
  assertNotEquals(null, result);
  assertNotEquals(rawCounters, yarnCounters);
  assertEquals(result, yarnCounters);
}
@Test public void testSetNonNullCountersToNull() { // Create basic class TaskAttemptReport report = Records.newRecord(TaskAttemptReport.class); // Set raw counters org.apache.hadoop.mapreduce.Counters rCounters = MockJobs.newCounters(); report.setRawCounters(rCounters); // Verify getCounters converts properly from raw to real Counters counters = report.getCounters(); assertNotEquals(null, counters); // Clear counters to null and then verify report.setCounters(null); assertEquals(null, report.getCounters()); assertEquals(null, report.getRawCounters()); }
@Test public void testSetNonNullRawCountersToNull() { // Create basic class TaskAttemptReport report = Records.newRecord(TaskAttemptReport.class); // Set raw counters org.apache.hadoop.mapreduce.Counters rCounters = MockJobs.newCounters(); report.setRawCounters(rCounters); // Verify getCounters converts properly from raw to real Counters counters = report.getCounters(); assertNotEquals(null, counters); // Clear counters to null and then verify report.setRawCounters(null); assertEquals(null, report.getCounters()); assertEquals(null, report.getRawCounters()); }
@Test public void testSetRawCounters() { // Create basic class TaskReport report = Records.newRecord(TaskReport.class); org.apache.hadoop.mapreduce.Counters rCounters = MockJobs.newCounters(); // Set raw counters report.setRawCounters(rCounters); // Verify getCounters converts properly from raw to real Counters counters = report.getCounters(); assertNotEquals(null, counters); }
@Test public void testBuildImplicitRawCounters() { // Create basic class TaskReportPBImpl report = new TaskReportPBImpl(); org.apache.hadoop.mapreduce.Counters rCounters = MockJobs.newCounters(); // Set raw counters report.setRawCounters(rCounters); // Verify getProto method implicitly converts/sets real counters MRProtos.TaskReportProto protoVal = report.getProto(); assertTrue(protoVal.hasCounters()); }
@Test public void testSetNonNullCountersToNull() { // Create basic class TaskReport report = Records.newRecord(TaskReport.class); // Set raw counters org.apache.hadoop.mapreduce.Counters rCounters = MockJobs.newCounters(); report.setRawCounters(rCounters); // Verify getCounters converts properly from raw to real Counters counters = report.getCounters(); assertNotEquals(null, counters); // Clear counters to null and then verify report.setCounters(null); assertEquals(null, report.getCounters()); assertEquals(null, report.getRawCounters()); }
@Test public void testSetNonNullRawCountersToNull() { // Create basic class TaskReport report = Records.newRecord(TaskReport.class); // Set raw counters org.apache.hadoop.mapreduce.Counters rCounters = MockJobs.newCounters(); report.setRawCounters(rCounters); // Verify getCounters converts properly from raw to real Counters counters = report.getCounters(); assertNotEquals(null, counters); // Clear counters to null and then verify report.setRawCounters(null); assertEquals(null, report.getCounters()); assertEquals(null, report.getRawCounters()); }