/**
 * Builds a fully-stubbed Mockito mock of an {@link ITestResult} for the given
 * test class and reflective method, including a mocked test class, TestNG
 * method, and a test context whose current {@code XmlTest} belongs to a suite
 * that registers {@code VideoListener}.
 *
 * @param tClass the class to report as the test's "real class"
 * @param method the reflective method the mocked ITestNGMethod wraps
 * @return a mock ITestResult wired up for listener unit tests
 */
protected ITestResult prepareMock(Class<?> tClass, Method method) {
    ITestResult result = mock(ITestResult.class);
    IClass clazz = mock(IClass.class);
    ITestNGMethod testNGMethod = mock(ITestNGMethod.class);
    ConstructorOrMethod cm = mock(ConstructorOrMethod.class);
    String methodName = method.getName();

    when(result.getTestClass()).thenReturn(clazz);
    // Stub directly on the mock instead of through a chained call such as
    // when(result.getTestClass().getRealClass()) — chained stubbing only works
    // because the previous stub is already in place and is fragile if the
    // stubbing order ever changes.
    when(clazz.getRealClass()).thenReturn(tClass);
    when(clazz.getName()).thenReturn(this.getClass().getName());

    when(result.getMethod()).thenReturn(testNGMethod);
    when(cm.getMethod()).thenReturn(method);
    when(testNGMethod.getConstructorOrMethod()).thenReturn(cm);
    when(testNGMethod.getMethodName()).thenReturn(methodName);

    ITestContext context = mock(ITestContext.class);
    when(result.getTestContext()).thenReturn(context);

    // Real (non-mock) XML model objects: the listener under test reads the
    // suite's listener list from them.
    XmlTest xmlTest = new XmlTest();
    XmlSuite suite = new XmlSuite();
    xmlTest.setXmlSuite(suite);
    suite.setListeners(Arrays.asList(VideoListener.class.getName()));
    when(context.getCurrentXmlTest()).thenReturn(xmlTest);
    return result;
}
/** * Flatten a list of test suite results into a collection of results grouped by test class. * This method basically strips away the TestNG way of organising tests and arranges * the results by test class. */ private Collection<TestClassResults> flattenResults(List<ISuite> suites) { Map<IClass, TestClassResults> flattenedResults = new HashMap<>(); for (ISuite suite : suites) { for (ISuiteResult suiteResult : suite.getResults().values()) { // Failed and skipped configuration methods are treated as test failures. organiseByClass(suiteResult.getTestContext().getFailedConfigurations().getAllResults(), flattenedResults); organiseByClass(suiteResult.getTestContext().getSkippedConfigurations().getAllResults(), flattenedResults); // Successful configuration methods are not included. organiseByClass(suiteResult.getTestContext().getFailedTests().getAllResults(), flattenedResults); organiseByClass(suiteResult.getTestContext().getSkippedTests().getAllResults(), flattenedResults); organiseByClass(suiteResult.getTestContext().getPassedTests().getAllResults(), flattenedResults); } } return flattenedResults.values(); }
/**
 * Returns all annotations of the given type declared on the real class of the
 * test result. Uses {@code getAnnotationsByType}, so {@code @Repeatable}
 * container annotations are unwrapped.
 *
 * @param result the test result whose class is inspected
 * @param clazz  the annotation type to look up
 * @return the matching annotations, or an empty list if the result has no
 *         test class
 */
private <T extends Annotation> List<T> getAnnotationsOnClass(final ITestResult result, final Class<T> clazz) {
    IClass testClass = result.getTestClass();
    if (testClass == null) {
        return new ArrayList<>();
    }
    // getAnnotationsByType(Class<T>) is already typed as T[]; the original
    // Stream.of(result)/flatMap/cast pipeline and its
    // @SuppressWarnings("unchecked") were unnecessary.
    return new ArrayList<>(Arrays.asList(testClass.getRealClass().getAnnotationsByType(clazz)));
}
/**
 * Handles a test failure: writes the stack trace and a failure banner to the
 * per-class output file, and prints a one-line summary to the real stderr.
 *
 * @param context the failed test's result
 */
public void onTestFailure( ITestResult context ) {
    IClass failedClass = context.getTestClass();
    PrintStream out = getPrintStreamForClass( failedClass );
    classesWithErrors.add( failedClass );

    Throwable cause = context.getThrowable();
    String methodName = context.getMethod().getMethodName();

    out.println();
    cause.printStackTrace( out );
    out.println( "^^^^^^^^^^ test " + methodName + " failed ^^^^^^^^^^" );

    stderr.println( "XXX test failed : "
            + context.getMethod().getRealClass().getSimpleName()
            + "." + methodName
            + " (" + cause.getClass().getSimpleName() + ")" );

    super.onTestFailure( context );
}
/**
 * Looks up the aggregated results for the test class of the given result,
 * creating (and registering) a new {@link TestClassResults} on first use.
 *
 * @param flattenedResults the per-class aggregation map
 * @param testResult       the result whose class keys the lookup
 * @return the (possibly freshly created) results bucket for the class
 */
private TestClassResults getResultsForClass(Map<IClass, TestClassResults> flattenedResults, ITestResult testResult) {
    // computeIfAbsent replaces the get / null-check / put dance and performs
    // only a single key lookup.
    return flattenedResults.computeIfAbsent(testResult.getTestClass(), TestClassResults::new);
}
public void onFinish( ITestContext testContext ) { /* restore saved stdout/stderr */ System.setOut( stdout ); System.setErr( stderr ); //assert ( testOutputFileMap.size() > 0 ); /* close open filehandles */ for ( PrintStream fh : handles.values() ) { // fh.flush(); fh.close(); } /* report test output files created */ int offset = outputDirectory.length() + 1; List<String> uniqueFilenames = new ArrayList<String>( new HashSet<String>( testOutputFileMap.values() ) ); Collections.sort( uniqueFilenames ); // derive set of files whose test classes had errors Set<String> filesWithErrors = new HashSet<String>(); for ( IClass c : classesWithErrors ) filesWithErrors.add( testOutputFileMap.get( c ) ); stderr.println(); stderr.println( "wrote " + uniqueFilenames.size() + " test output files to directory " + outputDirectory + ":" ); for ( String filename : uniqueFilenames ) { boolean containsErrors = filesWithErrors.contains( filename ); stderr.println( (containsErrors ? "!!! " : " ") + filename.substring( offset ) + (containsErrors ? " (contains errors)" : "") ); } /* if ( classesWithErrors.size() > 0 ) { Set<String> filesWithErrors = new HashSet<String>(); for ( IClass c : classesWithErrors ) filesWithErrors.add( testOutputFileMap.get( c ) ); List<String> sortedFilesWithErrors = new ArrayList<String>( filesWithErrors ); Collections.sort( sortedFilesWithErrors ); stderr.println(); stderr.println("test output files whose test classes had errors:"); for ( String filename : sortedFilesWithErrors ) stderr.println(" " + filename ); } */ super.onFinish( testContext ); }
/** * Creates a new output file for the passed test class name * if not already created, returning a {@link PrintStream} to this file, * and re-directing {@link System.out} and {@link System.err} to this stream. * Name of file created is: <tt>${outputDirectory}/${currentClassName}.output</tt>. * File handles are held open until all tests have been run, see {@link #onFinish}. */ protected PrintStream getPrintStreamForClass( org.testng.IClass test_class ) { Class<?> c = test_class.getRealClass(); String filename = outputDirectory + "/" + c.getName() + ".output" ; // open file for current test class // stderr.println("getting filehandle for class=" + c ); PrintStream out = null; try { synchronized ( handles ) { out = handles.get( test_class ); if ( out == null ) { //stderr.println("> creating new file '" + filename + "' for " + c); out = new PrintStream( filename ); testOutputFileMap.put( test_class, filename ); handles.put( test_class, out ); out.println("**************************************************"); out.println("* test output from " + c ); out.println("* test started at " + new Date() ); out.println("**************************************************"); out.println(); } else { //stderr.println("> using cached handle for " + c ); } } // end synchronized } catch ( Exception e ) { stderr.println( "Problem encountered while creating test output file: " + e ); e.printStackTrace(); } // (temporarily) redirect stdout/stderr to file try { System.out.flush(); System.err.flush(); System.setOut( out ); System.setErr( out ); } catch ( Exception e ) { stderr.println( "Problem encountered while re-directing stderr/stdout to file: " + e ); e.printStackTrace(); } assert out != null; return out; }
// Pure pass-through: defers the class-level run decision entirely to the
// wrapped selector; no additional filtering is applied here.
@Override public boolean canRunFromClass(IClass testClass) { return delegate.canRunFromClass(testClass); }
/**
 * Orders test classes alphabetically by their fully-qualified name.
 *
 * @return a negative, zero, or positive value per the Comparator contract
 */
public int compare(IClass class1, IClass class2) {
    String firstName = class1.getName();
    String secondName = class2.getName();
    return firstName.compareTo(secondName);
}
/**
 * Files each result into the aggregation bucket keyed by its test class,
 * creating buckets on demand via {@code getResultsForClass}.
 *
 * @param testResults      the results to file
 * @param flattenedResults the per-class aggregation map being populated
 */
private void organiseByClass(Set<ITestResult> testResults, Map<IClass, TestClassResults> flattenedResults) {
    testResults.forEach(result -> getResultsForClass(flattenedResults, result).addResult(result));
}
// Captures the test class whose results this instance will aggregate.
private TestClassResults(IClass testClass) { this.testClass = testClass; }
// Accessor for the test class these aggregated results belong to.
public IClass getTestClass() { return testClass; }