Java class org.springframework.batch.core.launch.NoSuchJobException: example source code
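
NoSuchJobException is thrown by Spring Batch's job lookup abstractions (JobLocator/JobRegistry, JobExplorer, JobOperator) when no job, or no job instance, is registered under the requested name. The snippets below, collected from open-source projects, show typical ways of declaring, throwing, and handling it. As a minimal sketch of the common lookup-and-handle pattern (the class name, field wiring, and fallback behaviour here are illustrative assumptions, not taken from any of the projects below):

import org.springframework.batch.core.Job;
import org.springframework.batch.core.configuration.JobRegistry;
import org.springframework.batch.core.launch.NoSuchJobException;

public class JobLookup {

    private final JobRegistry jobRegistry;

    public JobLookup(JobRegistry jobRegistry) {
        this.jobRegistry = jobRegistry;
    }

    // Returns the registered job, or null if no job is registered under that name.
    public Job findJob(String jobName) {
        try {
            // JobRegistry.getJob(String) declares NoSuchJobException for unknown names
            return jobRegistry.getJob(jobName);
        } catch (NoSuchJobException e) {
            // Illustrative fallback; the projects below variously log, rethrow,
            // notify a listener, or map the exception to an HTTP 404.
            return null;
        }
    }
}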

Project: eMonocot    File: PalmwebIntegrationTest.java
@Test
public void testCreateGenericArchive() throws NoSuchJobException,
JobExecutionAlreadyRunningException, JobRestartException,
JobInstanceAlreadyCompleteException, JobParametersInvalidException, IOException {
    Map<String, JobParameter> parameters =
            new HashMap<String, JobParameter>();

    JobParameters jobParameters = new JobParameters(parameters);

    Job palmwebArchive = jobLocator.getJob("PalmWeb");
    assertNotNull("Palmweb must not be null",  palmwebArchive);
    JobExecution jobExecution = jobLauncher.run(palmwebArchive, jobParameters);
    assertEquals("The job should complete successfully", "COMPLETED", jobExecution.getExitStatus().getExitCode());
    for (StepExecution stepExecution : jobExecution.getStepExecutions()) {
        logger.info(stepExecution.getStepName() + " "
                + stepExecution.getReadCount() + " "
                + stepExecution.getFilterCount() + " "
                + stepExecution.getWriteCount() + " " + stepExecution.getCommitCount());
    }
}
Project: spring-cloud-dashboard    File: RestControllerAdvice.java
/**
 * Log the exception message at warn level and the stack trace at trace level.
 * Return response status HttpStatus.NOT_FOUND.
 */
@ExceptionHandler({NoSuchAppRegistrationException.class,
        NoSuchTaskDefinitionException.class,
        NoSuchTaskExecutionException.class,
        NoSuchJobExecutionException.class,
        NoSuchJobInstanceException.class,
        NoSuchJobException.class,
        NoSuchStepExecutionException.class,
        MetricsMvcEndpoint.NoSuchMetricException.class})
@ResponseStatus(HttpStatus.NOT_FOUND)
@ResponseBody
public VndErrors onNotFoundException(Exception e) {
    String logref = logWarnLevelExceptionMessage(e);
    if (logger.isTraceEnabled()) {
        logTraceLevelStrackTrace(e);
    }
    String msg = getExceptionMessage(e);
    return new VndErrors(logref, msg);
}
Project: https-github.com-g0t4-jenkins2-course-spring-boot    File: JobLauncherCommandLineRunner.java
private void executeRegisteredJobs(JobParameters jobParameters)
        throws JobExecutionException {
    if (this.jobRegistry != null && StringUtils.hasText(this.jobNames)) {
        String[] jobsToRun = this.jobNames.split(",");
        for (String jobName : jobsToRun) {
            try {
                Job job = this.jobRegistry.getJob(jobName);
                if (this.jobs.contains(job)) {
                    continue;
                }
                execute(job, jobParameters);
            }
            catch (NoSuchJobException ex) {
                logger.debug("No job found in registry for job name: " + jobName);
                continue;
            }
        }
    }
}
Project: spring-boot-concourse    File: JobLauncherCommandLineRunner.java
private void executeRegisteredJobs(JobParameters jobParameters)
        throws JobExecutionException {
    if (this.jobRegistry != null && StringUtils.hasText(this.jobNames)) {
        String[] jobsToRun = this.jobNames.split(",");
        for (String jobName : jobsToRun) {
            try {
                Job job = this.jobRegistry.getJob(jobName);
                if (this.jobs.contains(job)) {
                    continue;
                }
                execute(job, jobParameters);
            }
            catch (NoSuchJobException ex) {
                logger.debug("No job found in registry for job name: " + jobName);
                continue;
            }
        }
    }
}
Project: spring-cloud-dataflow    File: RestControllerAdvice.java
/**
 * Log the exception message at warn level and the stack trace at trace level. Return
 * response status HttpStatus.NOT_FOUND.
 *
 * @param e one of the exceptions, {@link NoSuchStreamDefinitionException},
 * {@link NoSuchAppRegistrationException}, {@link NoSuchTaskDefinitionException},
 * {@link NoSuchTaskExecutionException}, {@link NoSuchJobExecutionException},
 * {@link NoSuchJobInstanceException}, {@link NoSuchJobException},
 * {@link NoSuchStepExecutionException},
 * {@link MetricsMvcEndpoint.NoSuchMetricException}, {@link NoSuchAppException}, or
 * {@link NoSuchAppInstanceException}
 * @return the error response in JSON format with media type
 * application/vnd.error+json
 */
@ExceptionHandler({ NoSuchStreamDefinitionException.class, NoSuchAppRegistrationException.class,
        NoSuchTaskDefinitionException.class, NoSuchTaskExecutionException.class, NoSuchJobExecutionException.class,
        NoSuchJobInstanceException.class, NoSuchJobException.class, NoSuchStepExecutionException.class,
        MetricsMvcEndpoint.NoSuchMetricException.class, NoSuchAppException.class,
        NoSuchAppInstanceException.class, ApplicationDoesNotExistException.class })
@ResponseStatus(HttpStatus.NOT_FOUND)
@ResponseBody
public VndErrors onNotFoundException(Exception e) {
    String logref = logWarnLevelExceptionMessage(e);
    if (logger.isTraceEnabled()) {
        logTraceLevelStrackTrace(e);
    }
    String msg = getExceptionMessage(e);
    return new VndErrors(logref, msg);
}
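The advice above turns NoSuchJobException (together with the other "not found" exceptions) into an HTTP 404 carrying an application/vnd.error+json body. A hypothetical controller method that relies on it simply declares the exception and lets it propagate; the service call below mirrors the spring-cloud-dataflow snippets further down, but the endpoint itself is only an illustration, not part of the project:

// Illustrative endpoint: an unknown job name makes the service throw
// NoSuchJobException, which the @ExceptionHandler above maps to a 404 response.
@RequestMapping(value = "/count", method = RequestMethod.GET, params = "name")
@ResponseBody
public long countExecutions(@RequestParam("name") String jobName) throws NoSuchJobException {
    return taskJobService.countJobExecutionsForJob(jobName);
}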
Project: contestparser    File: JobLauncherCommandLineRunner.java
private void executeRegisteredJobs(JobParameters jobParameters)
        throws JobExecutionException {
    if (this.jobRegistry != null && StringUtils.hasText(this.jobNames)) {
        String[] jobsToRun = this.jobNames.split(",");
        for (String jobName : jobsToRun) {
            try {
                Job job = this.jobRegistry.getJob(jobName);
                if (this.jobs.contains(job)) {
                    continue;
                }
                execute(job, jobParameters);
            }
            catch (NoSuchJobException nsje) {
                logger.debug("No job found in registry for job name: " + jobName);
                continue;
            }
        }
    }
}
Project: eMonocot    File: JobLauncherImpl.java
@Override
public void launch(JobLaunchRequest request) {
    Job job;
    try {
        job = jobLocator.getJob(request.getJob());
        Map<String, JobParameter> jobParameterMap = new HashMap<String, JobParameter>();
        for(String parameterName : request.getParameters().keySet()) {
            jobParameterMap.put(parameterName, new JobParameter(request.getParameters().get(parameterName)));
        }
        JobParameters jobParameters = new JobParameters(jobParameterMap);
        try {
            jobLauncher.run(job, jobParameters);
        } catch (JobExecutionAlreadyRunningException jeare) {
            jobStatusNotifier.notify(new JobExecutionException(jeare.getLocalizedMessage()), request.getParameters().get("resource.identifier"));
        } catch (JobRestartException jre) {
            jobStatusNotifier.notify(new JobExecutionException(jre.getLocalizedMessage()), request.getParameters().get("resource.identifier"));
        } catch (JobInstanceAlreadyCompleteException jiace) {
            jobStatusNotifier.notify(new JobExecutionException(jiace.getLocalizedMessage()), request.getParameters().get("resource.identifier"));
        } catch (JobParametersInvalidException jpie) {
            jobStatusNotifier.notify(new JobExecutionException(jpie.getLocalizedMessage()), request.getParameters().get("resource.identifier"));
        }
    } catch (NoSuchJobException nsje) {
        jobStatusNotifier.notify(new JobExecutionException(nsje.getLocalizedMessage()), request.getParameters().get("resource.identifier"));
    }
}
Project: eMonocot    File: ImageProcessingJobIntegrationTest.java
/**
 *
 * @throws IOException
 *             if a temporary file cannot be created.
 * @throws NoSuchJobException
 *             if SpeciesPageHarvestingJob cannot be located
 * @throws JobParametersInvalidException
 *             if the job parameters are invalid
 * @throws JobInstanceAlreadyCompleteException
 *             if the job has already completed
 * @throws JobRestartException
 *             if the job cannot be restarted
 * @throws JobExecutionAlreadyRunningException
 *             if the job is already running
 */
@Test
public final void testNotModifiedResponse() throws IOException,
NoSuchJobException, JobExecutionAlreadyRunningException,
JobRestartException, JobInstanceAlreadyCompleteException,
JobParametersInvalidException {
    Map<String, JobParameter> parameters = new HashMap<String, JobParameter>();
    parameters.put("query.string", new JobParameter("select i from Image i"));

    JobParameters jobParameters = new JobParameters(parameters);

    Job job = jobLocator.getJob("ImageProcessing");
    assertNotNull("ImageProcessing must not be null", job);
    JobExecution jobExecution = jobLauncher.run(job, jobParameters);
    assertEquals("The job should complete successfully", "COMPLETED", jobExecution.getExitStatus().getExitCode());

    for (StepExecution stepExecution : jobExecution.getStepExecutions()) {
        logger.info(stepExecution.getStepName() + " "
                + stepExecution.getReadCount() + " "
                + stepExecution.getFilterCount() + " "
                + stepExecution.getWriteCount());
    }
}
Project: marklogic-spring-batch    File: MarkLogicJobInstanceDao.java
@Override
public int getJobInstanceCount(String jobName) throws NoSuchJobException {
    StructuredQueryBuilder qb = new StructuredQueryBuilder(properties.getSearchOptions());
    StructuredQueryDefinition querydef =
            qb.and(
                    qb.valueConstraint("jobName", jobName),
                    qb.collection(properties.getJobInstanceCollection())
            );
    QueryManager queryMgr = databaseClient.newQueryManager();
    SearchHandle results = queryMgr.search(querydef, new SearchHandle());
    int count = (int) results.getTotalResults();
    if (count == 0) {
        throw new NoSuchJobException(jobName + " not found");
    } else {
        return count;
    }
}
Project: marklogic-spring-batch    File: CommandLineJobRunnerTests.java
@Override
public int getJobInstanceCount(String jobName)
        throws NoSuchJobException {
    int count = 0;

    for (JobInstance jobInstance : jobInstances) {
        if(jobInstance.getJobName().equals(jobName)) {
            count++;
        }
    }

    if(count == 0) {
        throw new NoSuchJobException("Unable to find job instances for " + jobName);
    } else {
        return count;
    }
}
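Both implementations above follow the JobInstanceDao contract they override: an unknown job name is reported by throwing NoSuchJobException rather than by returning zero. A caller that prefers a zero count therefore has to translate the exception itself; a small illustrative helper (the method name is an assumption):

// Illustrative helper: treat "no such job" as a count of zero.
private int countInstancesOrZero(JobInstanceDao jobInstanceDao, String jobName) {
    try {
        return jobInstanceDao.getJobInstanceCount(jobName);
    } catch (NoSuchJobException e) {
        return 0;
    }
}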
Project: powop    File: JobLauncherImpl.java
@Override
public void launch(JobLaunchRequest request) {
    Job job;
    try {
        job = jobLocator.getJob(request.getJob());
        Map<String, JobParameter> jobParameterMap = new HashMap<String, JobParameter>();
        for(String parameterName : request.getParameters().keySet()) {
            jobParameterMap.put(parameterName, new JobParameter(request.getParameters().get(parameterName)));
        }

        JobParameters jobParameters = new JobParameters(jobParameterMap);
        jobLauncher.run(job, jobParameters);
    } catch (NoSuchJobException 
            | JobExecutionAlreadyRunningException
            | JobRestartException
            | JobInstanceAlreadyCompleteException
            | JobParametersInvalidException exception) {
            jobStatusNotifier.notify(new JobExecutionException(exception.getLocalizedMessage()), request.getParameters().get("job.configuration.id"));
    }
}
Project: powop    File: PalmwebIntegrationTest.java
@Test
public void testCreateGenericArchive() throws NoSuchJobException,
JobExecutionAlreadyRunningException, JobRestartException,
JobInstanceAlreadyCompleteException, JobParametersInvalidException, IOException {
    Map<String, JobParameter> parameters =
            new HashMap<String, JobParameter>();

    JobParameters jobParameters = new JobParameters(parameters);

    Job palmwebArchive = jobLocator.getJob("PalmWeb");
    assertNotNull("Palmweb must not be null",  palmwebArchive);
    JobExecution jobExecution = jobLauncher.run(palmwebArchive, jobParameters);
    assertEquals("The job should complete successfully", "COMPLETED", jobExecution.getExitStatus().getExitCode());
    for (StepExecution stepExecution : jobExecution.getStepExecutions()) {
        logger.info(stepExecution.getStepName() + " "
                + stepExecution.getReadCount() + " "
                + stepExecution.getFilterCount() + " "
                + stepExecution.getWriteCount() + " " + stepExecution.getCommitCount());
    }
}
Project: spring-batch-experiments    File: StopTest.java
private void waitForTermination(Job job) throws NoSuchJobException,
        InterruptedException {
    int timeout = 10000;
    int current = 0;

    while (jobOperator.getRunningExecutions(job.getName()).size() > 0 && current < timeout) {
        Thread.sleep(100);
        current += 100;
    }

    if (jobOperator.getRunningExecutions(job.getName()).size() > 0) {
        throw new IllegalStateException("the execution hasn't stopped " +
                                                "in the expected period (timeout = " + timeout + " ms). " +
                                                "Consider increasing the timeout before checking if it's a bug.");
    }
}
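waitForTermination only polls for completion; in a stop test it is usually paired with a call that actually requests the stop. A sketch of that pairing with the same jobOperator (exception handling is abbreviated; JobOperator.stop additionally declares NoSuchJobExecutionException and JobExecutionNotRunningException):

// Illustrative pairing: ask every running execution of the job to stop, then wait.
private void stopAndWait(Job job) throws Exception {
    for (Long executionId : jobOperator.getRunningExecutions(job.getName())) {
        jobOperator.stop(executionId);
    }
    waitForTermination(job);
}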
Project: spring-batch-article    File: CustomerReportJobConfig.java
@PreDestroy
public void destroy() throws NoSuchJobException {
    jobs.getJobNames().forEach(name -> log.info("job name: {}", name));
    jobs.getJobInstances(JOB_NAME, 0, jobs.getJobInstanceCount(JOB_NAME)).forEach(
        jobInstance -> {
            log.info("job instance id {}", jobInstance.getInstanceId());
        }
    );

}
Project: spring-cloud-dataflow    File: JobInstanceController.java
/**
 * Return a pageable list of {@link JobInstanceResource}s for the named job.
 *
 * @param jobName the name of the job
 * @param pageable pageable collection of {@link JobInstance}s.
 * @param assembler assembler for the {@link JobInstance}s
 * @return a list of job instances
 * @throws NoSuchJobException if no job with the specified jobName exists.
 */
@RequestMapping(value = "", method = RequestMethod.GET, params = "name")
@ResponseStatus(HttpStatus.OK)
public PagedResources<JobInstanceResource> list(@RequestParam("name") String jobName, Pageable pageable,
        PagedResourcesAssembler<JobInstanceExecutions> assembler) throws NoSuchJobException {
    List<JobInstanceExecutions> jobInstances = taskJobService.listTaskJobInstancesForJobName(pageable, jobName);
    Page<JobInstanceExecutions> page = new PageImpl<>(jobInstances, pageable,
            taskJobService.countJobInstances(jobName));
    return assembler.toResource(page, jobAssembler);
}
Project: spring-cloud-dataflow    File: JobExecutionController.java
/**
 * Retrieve all task job executions for the specified job name.
 *
 * @param jobName name of the job
 * @param pageable pageable collection of {@code TaskJobExecution}s.
 * @param assembler assembler for the {@link TaskJobExecution}s
 * @return a list of task/job executions with the specified jobName.
 * @throws NoSuchJobException if the job with the given name does not exist.
 */
@RequestMapping(value = "", method = RequestMethod.GET, params = "name", produces = "application/json")
@ResponseStatus(HttpStatus.OK)
public PagedResources<JobExecutionResource> retrieveJobsByName(@RequestParam("name") String jobName,
        Pageable pageable, PagedResourcesAssembler<TaskJobExecution> assembler) throws NoSuchJobException {
    List<TaskJobExecution> jobExecutions = taskJobService.listJobExecutionsForJob(pageable, jobName);
    Page<TaskJobExecution> page = new PageImpl<>(jobExecutions, pageable,
            taskJobService.countJobExecutionsForJob(jobName));
    return assembler.toResource(page, jobAssembler);
}
Project: spring-cloud-dataflow    File: DefaultTaskJobService.java
/**
 * Retrieves Pageable list of {@link JobInstanceExecutions} from the JobRepository with a
 * specific jobName and matches the data with the associated JobExecutions.
 *
 * @param pageable enumerates the data to be returned.
 * @param jobName the name of the job for which to search.
 * @return a List containing {@link JobInstanceExecutions}.
 * @throws NoSuchJobException if no job with the given jobName exists.
 */
@Override
public List<JobInstanceExecutions> listTaskJobInstancesForJobName(Pageable pageable, String jobName)
        throws NoSuchJobException {
    Assert.notNull(pageable, "pageable must not be null");
    Assert.notNull(jobName, "jobName must not be null");
    List<JobInstanceExecutions> taskJobInstances = new ArrayList<>();
    for (JobInstance jobInstance : jobService.listJobInstances(jobName, pageable.getOffset(),
            pageable.getPageSize())) {
        taskJobInstances.add(getJobInstanceExecution(jobInstance));
    }
    return taskJobInstances;
}
Project: eMonocot    File: TaxonMatchIntegrationTest.java
/**
 *
 * @throws IOException
 *             if a temporary file cannot be created.
 * @throws NoSuchJobException
 *             if SpeciesPageHarvestingJob cannot be located
 * @throws JobParametersInvalidException
 *             if the job parameters are invalid
 * @throws JobInstanceAlreadyCompleteException
 *             if the job has already completed
 * @throws JobRestartException
 *             if the job cannot be restarted
 * @throws JobExecutionAlreadyRunningException
 *             if the job is already running
 */
@Test
public final void testMatchTaxa() throws IOException,
NoSuchJobException, JobExecutionAlreadyRunningException,
JobRestartException, JobInstanceAlreadyCompleteException,
JobParametersInvalidException {

    Session session = sessionFactory.openSession();
    Transaction tx = session.beginTransaction();

    List<Taxon> taxa = session.createQuery("from Taxon as taxon").list();
    solrIndexingListener.indexObjects(taxa);
    tx.commit();

    ClassPathResource input = new ClassPathResource("/org/emonocot/job/taxonmatch/input.csv");
    Map<String, JobParameter> parameters = new HashMap<String, JobParameter>();
    parameters.put("assume.accepted.matches", new JobParameter(Boolean.TRUE.toString()));
    parameters.put("input.file", new JobParameter(input.getFile().getAbsolutePath()));
    parameters.put("output.file", new JobParameter(File.createTempFile("output", "csv").getAbsolutePath()));

    JobParameters jobParameters = new JobParameters(parameters);

    Job taxonMatchingJob = jobLocator.getJob("TaxonMatching");
    assertNotNull("TaxonMatching must not be null", taxonMatchingJob);
    JobExecution jobExecution = jobLauncher.run(taxonMatchingJob, jobParameters);
    assertEquals("The job should complete successfully", "COMPLETED", jobExecution.getExitStatus().getExitCode());

    FileReader file = new FileReader(jobParameters.getParameters().get("output.file").getValue().toString());
    BufferedReader reader = new BufferedReader(file);
    assertNotNull("There should be an output file", reader);
    String ln;
    while ((ln = reader.readLine()) != null) {
        logger.debug(ln);
    }
}
Project: eMonocot    File: IUCNJobIntegrationTest.java
/**
 *
 * @throws IOException
 *             if a temporary file cannot be created.
 * @throws NoSuchJobException
 *             if SpeciesPageHarvestingJob cannot be located
 * @throws JobParametersInvalidException
 *             if the job parameters are invalid
 * @throws JobInstanceAlreadyCompleteException
 *             if the job has already completed
 * @throws JobRestartException
 *             if the job cannot be restarted
 * @throws JobExecutionAlreadyRunningException
 *             if the job is already running
 */
@Test
public final void testNotModifiedResponse() throws IOException,
NoSuchJobException, JobExecutionAlreadyRunningException,
JobRestartException, JobInstanceAlreadyCompleteException,
JobParametersInvalidException {
    Session session = sessionFactory.openSession();
    Transaction tx = session.beginTransaction();

    List<Taxon> taxa = session.createQuery("from Taxon as taxon").list();
    solrIndexingListener.indexObjects(taxa);
    tx.commit();

    Map<String, JobParameter> parameters =
            new HashMap<String, JobParameter>();
    parameters.put("authority.name", new JobParameter("test"));
    String repository = properties.getProperty("test.resource.baseUrl");
    parameters.put("authority.uri", new JobParameter(repository + "iucn.json"));
    parameters.put("authority.last.harvested",
            new JobParameter(Long.toString((IUCNJobIntegrationTest.PAST_DATETIME.getMillis()))));


    JobParameters jobParameters = new JobParameters(parameters);

    Job job = jobLocator.getJob("IUCNImport");
    assertNotNull("IUCNImport must not be null", job);
    JobExecution jobExecution = jobLauncher.run(job, jobParameters);
    assertEquals("The job should complete successfully", "COMPLETED", jobExecution.getExitStatus().getExitCode());
    for (StepExecution stepExecution : jobExecution.getStepExecutions()) {
        logger.info(stepExecution.getStepName() + " "
                + stepExecution.getReadCount() + " "
                + stepExecution.getFilterCount() + " "
                + stepExecution.getWriteCount() + " " + stepExecution.getCommitCount());
    }
}
Project: eMonocot    File: DeltaNaturalLanguageDwcIntegrationTest.java
@Test
public void testCreateGenericArchive() throws NoSuchJobException,
JobExecutionAlreadyRunningException, JobRestartException,
JobInstanceAlreadyCompleteException, JobParametersInvalidException, IOException {
    Map<String, JobParameter> parameters =
            new HashMap<String, JobParameter>();
    parameters.put("specs.file", new JobParameter(specsFile.getFile().getAbsolutePath()));
    parameters.put("items.file", new JobParameter(itemsFile.getFile().getAbsolutePath()));
    parameters.put("taxon.file", new JobParameter(taxonFile.getFile().getAbsolutePath()));
    parameters.put("fields.terminated.by", new JobParameter(","));
    parameters.put("fields.enclosed.by", new JobParameter("\""));
    parameters.put("output.fields",new JobParameter("taxonID,description,language,license"));
    parameters.put("taxon.file.skip.lines", new JobParameter("1"));
    parameters.put("taxon.file.field.names", new JobParameter("taxonID,subfamily,subtribe,tribe,accessRights,bibliographicCitation,created,license,modified,references,rights,rightsHolder,acceptedNameUsage,acceptedNameUsageID,class,datasetID,datasetName,family,genus,infraspecificEpithet,kingdom,nameAccordingTo,namePublishedIn,namePublishedInID,namePublishedInYear,nomenclaturalCode,nomenclaturalStatus,order,originalNameUsage,originalNameUsageID,parentNameUsage,parentNameUsageID,phylum,scientificName,scientificNameAuthorship,scientificNameID,specificEpithet,subgenus,taxonRank,taxonRemarks,taxonomicStatus,verbatimTaxonRank"));
    parameters.put("taxon.file.delimiter", new JobParameter("\t"));
    parameters.put("taxon.file.quote.character", new JobParameter("\""));
    parameters.put("description.file.field.names", new JobParameter("taxonID,description,license,language,type,rights"));
    parameters.put("description.default.values", new JobParameter("license=http://creativecommons.org/licenses/by-nc-sa/3.0,language=EN,type=general,rights=© Copyright The Board of Trustees\\, Royal Botanic Gardens\\, Kew."));
    parameters.put("archive.file", new JobParameter(UUID.randomUUID().toString()));

    JobParameters jobParameters = new JobParameters(parameters);

    Job deltaToDwC = jobLocator.getJob("DeltaToDwC");
    assertNotNull("DeltaToDwC must not be null",  deltaToDwC);
    JobExecution jobExecution = jobLauncher.run(deltaToDwC, jobParameters);
    assertEquals("The job should complete successfully", "COMPLETED", jobExecution.getExitStatus().getExitCode());
    for (StepExecution stepExecution : jobExecution.getStepExecutions()) {
        logger.info(stepExecution.getStepName() + " "
                + stepExecution.getReadCount() + " "
                + stepExecution.getFilterCount() + " "
                + stepExecution.getWriteCount() + " " + stepExecution.getCommitCount());
    }

}
Project: eMonocot    File: GrassbaseIntegrationTest.java
@Test
public void testCreateGenericArchive() throws NoSuchJobException,
JobExecutionAlreadyRunningException, JobRestartException,
JobInstanceAlreadyCompleteException, JobParametersInvalidException, IOException {
    Map<String, JobParameter> parameters =
            new HashMap<String, JobParameter>();
    parameters.put("species.specs.file", new JobParameter(speciesSpecsFile.getFile().getAbsolutePath()));
    parameters.put("species.items.file", new JobParameter(speciesItemsFile.getFile().getAbsolutePath()));
    parameters.put("species.taxon.file", new JobParameter(speciesTaxonFile.getFile().getAbsolutePath()));
    parameters.put("genera.specs.file", new JobParameter(generaSpecsFile.getFile().getAbsolutePath()));
    parameters.put("genera.items.file", new JobParameter(generaItemsFile.getFile().getAbsolutePath()));
    parameters.put("genera.taxon.file", new JobParameter(generaTaxonFile.getFile().getAbsolutePath()));
    parameters.put("images.file", new JobParameter(imagesFile.getFile().getAbsolutePath()));

    JobParameters jobParameters = new JobParameters(parameters);

    Job deltaToDwC = jobLocator.getJob("Grassbase");
    assertNotNull("Grassbase must not be null",  deltaToDwC);
    JobExecution jobExecution = jobLauncher.run(deltaToDwC, jobParameters);
    assertEquals("The job should complete successfully", "COMPLETED", jobExecution.getExitStatus().getExitCode());
    for (StepExecution stepExecution : jobExecution.getStepExecutions()) {
        logger.info(stepExecution.getStepName() + " "
                + stepExecution.getReadCount() + " "
                + stepExecution.getFilterCount() + " "
                + stepExecution.getWriteCount() + " " + stepExecution.getCommitCount());
    }

}
Project: eMonocot    File: GBIFJobIntegrationTest.java
/**
 *
 * @throws IOException
 *             if a temporary file cannot be created.
 * @throws NoSuchJobException
 *             if SpeciesPageHarvestingJob cannot be located
 * @throws JobParametersInvalidException
 *             if the job parameters are invalid
 * @throws JobInstanceAlreadyCompleteException
 *             if the job has already completed
 * @throws JobRestartException
 *             if the job cannot be restarted
 * @throws JobExecutionAlreadyRunningException
 *             if the job is already running
 */
@Test
public final void testNotModifiedResponse() throws IOException,
NoSuchJobException, JobExecutionAlreadyRunningException,
JobRestartException, JobInstanceAlreadyCompleteException,
JobParametersInvalidException {
    Session session = sessionFactory.openSession();
    Transaction tx = session.beginTransaction();

    List<Taxon> taxa = session.createQuery("from Taxon as taxon").list();
    solrIndexingListener.indexObjects(taxa);
    tx.commit();

    Map<String, JobParameter> parameters =
            new HashMap<String, JobParameter>();
    parameters.put("family", new JobParameter("Araceae"));
    parameters.put("authority.name", new JobParameter("test"));
    String repository = properties.getProperty("test.resource.baseUrl",
            "http://build.e-monocot.org/git/?p=emonocot.git;a=blob_plain;f=emonocot-harvest/src/test/resources/org/emonocot/job/common/");
    parameters.put("authority.uri", new JobParameter(repository + "list.xml"));

    //parameters.put("authority.uri", new JobParameter("http://data.gbif.org/ws/rest/occurrence/list?taxonconceptkey=6979&maxresults=1000&typesonly=true&format=darwin&mode=raw&startindex="));
    parameters.put("authority.last.harvested",
            new JobParameter(Long.toString((GBIFJobIntegrationTest.PAST_DATETIME.getMillis()))));


    JobParameters jobParameters = new JobParameters(parameters);

    Job job = jobLocator.getJob("GBIFImport");
    assertNotNull("GBIFImport must not be null", job);
    JobExecution jobExecution = jobLauncher.run(job, jobParameters);
    assertEquals("The job should complete successfully", "COMPLETED", jobExecution.getExitStatus().getExitCode());
    for (StepExecution stepExecution : jobExecution.getStepExecutions()) {
        logger.info(stepExecution.getStepName() + " "
                + stepExecution.getReadCount() + " "
                + stepExecution.getFilterCount() + " "
                + stepExecution.getWriteCount() + " " + stepExecution.getCommitCount());
    }
}
Project: eMonocot    File: CalculateDerivedPropertiesJobIntegrationTest.java
/**
 *
 * @throws IOException
 *             if a temporary file cannot be created.
 * @throws NoSuchJobException
 *             if SpeciesPageHarvestingJob cannot be located
 * @throws JobParametersInvalidException
 *             if the job parameters are invalid
 * @throws JobInstanceAlreadyCompleteException
 *             if the job has already completed
 * @throws JobRestartException
 *             if the job cannot be restarted
 * @throws JobExecutionAlreadyRunningException
 *             if the job is already running
 */
@Test
public final void testNotModifiedResponse() throws IOException,
NoSuchJobException, JobExecutionAlreadyRunningException,
JobRestartException, JobInstanceAlreadyCompleteException,
JobParametersInvalidException {
    Map<String, JobParameter> parameters =
            new HashMap<String, JobParameter>();
    parameters.put("query.string", new JobParameter(
            "select t.id from Taxon t join t.parentNameUsage"));

    JobParameters jobParameters = new JobParameters(parameters);

    Job job = jobLocator
            .getJob("CalculateDerivedProperties");
    assertNotNull("CalculateDerivedProperties must not be null",
            job);
    JobExecution jobExecution = jobLauncher.run(job, jobParameters);
    assertEquals("The job should complete successfully", "COMPLETED", jobExecution.getExitStatus().getExitCode());

    for (StepExecution stepExecution : jobExecution.getStepExecutions()) {
        logger.info(stepExecution.getStepName() + " "
                + stepExecution.getReadCount() + " "
                + stepExecution.getFilterCount() + " "
                + stepExecution.getWriteCount());
    }
}
Project: eMonocot    File: TaxonImportingIntegrationTest.java
/**
 *
 * @throws IOException
 *             if a temporary file cannot be created.
 * @throws NoSuchJobException
 *             if SpeciesPageHarvestingJob cannot be located
 * @throws JobParametersInvalidException
 *             if the job parameters are invalid
 * @throws JobInstanceAlreadyCompleteException
 *             if the job has already completed
 * @throws JobRestartException
 *             if the job cannot be restarted
 * @throws JobExecutionAlreadyRunningException
 *             if the job is already running
 */
@Test
public final void testImportTaxa() throws IOException,
NoSuchJobException, JobExecutionAlreadyRunningException,
JobRestartException, JobInstanceAlreadyCompleteException,
JobParametersInvalidException {
    Map<String, JobParameter> parameters =
            new HashMap<String, JobParameter>();
    parameters.put("authority.name", new JobParameter(
            "test"));
    parameters.put("family", new JobParameter(
            "Araceae"));
    parameters.put("taxon.processing.mode", new JobParameter("IMPORT_TAXA_BY_AUTHORITY"));
    String repository = properties.getProperty("test.resource.baseUrl");
    parameters.put("authority.uri", new JobParameter(repository + "dwc.zip"));
    parameters.put("authority.last.harvested",
            new JobParameter(Long.toString((TaxonImportingIntegrationTest.PAST_DATETIME.getMillis()))));
    JobParameters jobParameters = new JobParameters(parameters);

    Job darwinCoreArchiveHarvestingJob = jobLocator.getJob("DarwinCoreArchiveHarvesting");
    assertNotNull("DarwinCoreArchiveHarvesting must not be null", darwinCoreArchiveHarvestingJob);
    JobExecution jobExecution = jobLauncher.run(darwinCoreArchiveHarvestingJob, jobParameters);
    assertEquals("The job should complete successfully", "COMPLETED", jobExecution.getExitStatus().getExitCode());
    for (StepExecution stepExecution : jobExecution.getStepExecutions()) {
        logger.info(stepExecution.getStepName() + " "
                + stepExecution.getReadCount() + " "
                + stepExecution.getFilterCount() + " "
                + stepExecution.getWriteCount());
    }

    //Test namePublishedIn is saved
    //        assertNotNull("The namePublishedIn should have been saved.",
    //                referenceService.find("urn:example.com:test:ref:numerouno"));
}
Project: eMonocot    File: IdentificationKeyImportingIntegrationTest.java
/**
 *
 * @throws IOException
 *             if a temporary file cannot be created.
 * @throws NoSuchJobException
 *             if SpeciesPageHarvestingJob cannot be located
 * @throws JobParametersInvalidException
 *             if the job parameters are invalid
 * @throws JobInstanceAlreadyCompleteException
 *             if the job has already completed
 * @throws JobRestartException
 *             if the job cannot be restarted
 * @throws JobExecutionAlreadyRunningException
 *             if the job is already running
 */
@Test
public final void testImportTaxa() throws IOException,
NoSuchJobException, JobExecutionAlreadyRunningException,
JobRestartException, JobInstanceAlreadyCompleteException,
JobParametersInvalidException {
    Map<String, JobParameter> parameters =
            new HashMap<String, JobParameter>();
    parameters.put("authority.name", new JobParameter(
            "test"));
    parameters.put("family", new JobParameter(
            "Araceae"));
    parameters.put("key.processing.mode", new JobParameter("IMPORT_KEYS"));
    parameters.put("taxon.processing.mode", new JobParameter("IMPORT_TAXA_BY_AUTHORITY"));
    parameters.put("authority.uri", new JobParameter("http://build.e-monocot.org/oldtest/test.zip"));
    parameters.put("authority.last.harvested",
            new JobParameter(Long.toString((IdentificationKeyImportingIntegrationTest.PAST_DATETIME.getMillis()))));
    JobParameters jobParameters = new JobParameters(parameters);

    Job darwinCoreArchiveHarvestingJob = jobLocator
            .getJob("DarwinCoreArchiveHarvesting");
    assertNotNull("DarwinCoreArchiveHarvesting must not be null",
            darwinCoreArchiveHarvestingJob);
    JobExecution jobExecution = jobLauncher.run(darwinCoreArchiveHarvestingJob, jobParameters);
    assertEquals("The job should complete successfully", "COMPLETED", jobExecution.getExitStatus().getExitCode());
    for (StepExecution stepExecution : jobExecution.getStepExecutions()) {
        logger.info(stepExecution.getStepName() + " "
                + stepExecution.getReadCount() + " "
                + stepExecution.getFilterCount() + " "
                + stepExecution.getWriteCount());
    }
}
Project: eMonocot    File: PhylogeneticTreeJobIntegrationTest.java
/**
 *
 * @throws IOException
 *             if a temporary file cannot be created.
 * @throws NoSuchJobException
 *             if SpeciesPageHarvestingJob cannot be located
 * @throws JobParametersInvalidException
 *             if the job parameters are invalid
 * @throws JobInstanceAlreadyCompleteException
 *             if the job has already completed
 * @throws JobRestartException
 *             if the job cannot be restarted
 * @throws JobExecutionAlreadyRunningException
 *             if the job is already running
 */
@Test
public final void testNotModifiedResponse() throws IOException,
NoSuchJobException, JobExecutionAlreadyRunningException,
JobRestartException, JobInstanceAlreadyCompleteException,
JobParametersInvalidException {
    Session session = sessionFactory.openSession();
    Transaction tx = session.beginTransaction();

    List<Taxon> taxa = session.createQuery("from Taxon as taxon").list();
    solrIndexingListener.indexObjects(taxa);
    tx.commit();

    Map<String, JobParameter> parameters = new HashMap<String, JobParameter>();
    parameters.put("authority.name", new JobParameter("test"));
    parameters.put("input.file.extension", new JobParameter("nwk"));
    parameters.put("root.taxon.identifier", new JobParameter("urn:kew.org:wcs:taxon:16026"));
    String repository = properties.getProperty("test.resource.baseUrl");
    parameters.put("authority.uri", new JobParameter(repository + "test.nwk"));
    parameters.put("authority.last.harvested", new JobParameter(Long.toString((PhylogeneticTreeJobIntegrationTest.PAST_DATETIME.getMillis()))));
    JobParameters jobParameters = new JobParameters(parameters);

    Job identificationKeyHarvestingJob = jobLocator.getJob("PhylogeneticTreeHarvesting");
    assertNotNull("PhylogeneticTreeHarvesting must not be null", identificationKeyHarvestingJob);
    JobExecution jobExecution = jobLauncher.run(identificationKeyHarvestingJob, jobParameters);
    assertEquals("The job should complete successfully", "COMPLETED", jobExecution.getExitStatus().getExitCode());
    for (StepExecution stepExecution : jobExecution.getStepExecutions()) {
        logger.info(stepExecution.getStepName() + " "
                + stepExecution.getReadCount() + " "
                + stepExecution.getFilterCount() + " "
                + stepExecution.getWriteCount());
    }


    List<Annotation> annotations = session.createQuery("from Annotation a").list();
    for(Annotation a : annotations) {
        logger.info(a.getJobId() + " " + a.getRecordType() + " " + a.getType() + " " + a.getCode() + " " + a.getText());
    }
}
Project: eMonocot    File: ReIndexingJobIntegrationTest.java
/**
 *
 * @throws IOException
 *             if a temporary file cannot be created.
 * @throws NoSuchJobException
 *             if SpeciesPageHarvestingJob cannot be located
 * @throws JobParametersInvalidException
 *             if the job parameters are invalid
 * @throws JobInstanceAlreadyCompleteException
 *             if the job has already completed
 * @throws JobRestartException
 *             if the job cannot be restarted
 * @throws JobExecutionAlreadyRunningException
 *             if the job is already running
 */
@Test
public final void testNotModifiedResponse() throws IOException,
NoSuchJobException, JobExecutionAlreadyRunningException,
JobRestartException, JobInstanceAlreadyCompleteException,
JobParametersInvalidException {
    Map<String, JobParameter> parameters =
            new HashMap<String, JobParameter>();
    parameters.put("query.string", new JobParameter("select t.id from Taxon t"));
    parameters.put("query.type", new JobParameter("org.emonocot.model.Taxon"));
    parameters.put("solr.selected.facets", new JobParameter("base.class_s=org.emonocot.model.Taxon"));

    JobParameters jobParameters = new JobParameters(parameters);

    Job job = jobLocator
            .getJob("ReIndex");
    assertNotNull("ReIndex must not be null",
            job);
    JobExecution jobExecution = jobLauncher.run(job, jobParameters);
    assertEquals("The job should complete successfully", "COMPLETED", jobExecution.getExitStatus().getExitCode());

    for (StepExecution stepExecution : jobExecution.getStepExecutions()) {
        logger.info(stepExecution.getStepName() + " "
                + stepExecution.getReadCount() + " "
                + stepExecution.getFilterCount() + " "
                + stepExecution.getWriteCount());
    }
}
Project: eMonocot    File: XperIdentificationKeyJobIntegrationTest.java
/**
 *
 * @throws IOException
 *             if a temporary file cannot be created.
 * @throws NoSuchJobException
 *             if SpeciesPageHarvestingJob cannot be located
 * @throws JobParametersInvalidException
 *             if the job parameters are invalid
 * @throws JobInstanceAlreadyCompleteException
 *             if the job has already completed
 * @throws JobRestartException
 *             if the job cannot be restarted
 * @throws JobExecutionAlreadyRunningException
 *             if the job is already running
 */
@Test
public final void testNotModifiedResponse() throws IOException,
NoSuchJobException, JobExecutionAlreadyRunningException,
JobRestartException, JobInstanceAlreadyCompleteException,
JobParametersInvalidException {
    Session session = sessionFactory.openSession();
    Transaction tx = session.beginTransaction();

    List<Taxon> taxa = session.createQuery("from Taxon as taxon").list();
    solrIndexingListener.indexObjects(taxa);
    tx.commit();

    Map<String, JobParameter> parameters = new HashMap<String, JobParameter>();
    parameters.put("authority.name", new JobParameter("test"));
    parameters.put("root.taxon.identifier", new JobParameter("urn:kew.org:wcs:taxon:16026"));
    String repository = properties.getProperty("test.resource.baseUrl");
    parameters.put("authority.uri", new JobParameter(repository + "testXperKey.xml"));
    parameters.put("authority.last.harvested", new JobParameter(Long.toString((XperIdentificationKeyJobIntegrationTest.PAST_DATETIME.getMillis()))));
    JobParameters jobParameters = new JobParameters(parameters);

    Job identificationKeyHarvestingJob = jobLocator.getJob("IdentificationKeyHarvesting");
    assertNotNull("IdentificationKeyHarvesting must not be null", identificationKeyHarvestingJob);
    JobExecution jobExecution = jobLauncher.run(identificationKeyHarvestingJob, jobParameters);
    assertEquals("The job should complete successfully", "COMPLETED", jobExecution.getExitStatus().getExitCode());
    for (StepExecution stepExecution : jobExecution.getStepExecutions()) {
        logger.info(stepExecution.getStepName() + " "
                + stepExecution.getReadCount() + " "
                + stepExecution.getFilterCount() + " "
                + stepExecution.getWriteCount());
    }


    List<Annotation> annotations = session.createQuery("from Annotation a").list();
    for(Annotation a : annotations) {
        logger.info(a.getJobId() + " " + a.getRecordType() + " " + a.getType() + " " + a.getCode() + " " + a.getText());
    }
}
Project: eMonocot    File: IdentificationKeyJobIntegrationTest.java
/**
 *
 * @throws IOException
 *             if a temporary file cannot be created.
 * @throws NoSuchJobException
 *             if SpeciesPageHarvestingJob cannot be located
 * @throws JobParametersInvalidException
 *             if the job parameters are invalid
 * @throws JobInstanceAlreadyCompleteException
 *             if the job has already completed
 * @throws JobRestartException
 *             if the job cannot be restarted
 * @throws JobExecutionAlreadyRunningException
 *             if the job is already running
 */
@Test
public final void testNotModifiedResponse() throws IOException,
NoSuchJobException, JobExecutionAlreadyRunningException,
JobRestartException, JobInstanceAlreadyCompleteException,
JobParametersInvalidException {
    Session session = sessionFactory.openSession();
    Transaction tx = session.beginTransaction();

    List<Taxon> taxa = session.createQuery("from Taxon as taxon").list();
    solrIndexingListener.indexObjects(taxa);
    tx.commit();

    Map<String, JobParameter> parameters = new HashMap<String, JobParameter>();
    parameters.put("authority.name", new JobParameter("test"));
    parameters.put("root.taxon.identifier", new JobParameter("urn:kew.org:wcs:taxon:16026"));
    String repository = properties.getProperty("test.resource.baseUrl");
    parameters.put("authority.uri", new JobParameter(repository + "testKey.xml"));
    parameters.put("authority.last.harvested", new JobParameter(Long.toString((IdentificationKeyJobIntegrationTest.PAST_DATETIME.getMillis()))));
    JobParameters jobParameters = new JobParameters(parameters);

    Job identificationKeyHarvestingJob = jobLocator.getJob("IdentificationKeyHarvesting");
    assertNotNull("IdentificationKeyHarvesting must not be null", identificationKeyHarvestingJob);
    JobExecution jobExecution = jobLauncher.run(identificationKeyHarvestingJob, jobParameters);
    assertEquals("The job should complete successfully", "COMPLETED", jobExecution.getExitStatus().getExitCode());
    for (StepExecution stepExecution : jobExecution.getStepExecutions()) {
        logger.info(stepExecution.getStepName() + " "
                + stepExecution.getReadCount() + " "
                + stepExecution.getFilterCount() + " "
                + stepExecution.getWriteCount());
    }


    List<Annotation> annotations = session.createQuery("from Annotation a").list();
    for(Annotation a : annotations) {
        logger.info(a.getJobId() + " " + a.getRecordType() + " " + a.getType() + " " + a.getCode() + " " + a.getText());
    }
}
Project: powop    File: DeltaNaturalLanguageDwcIntegrationTest.java
@Test
public void testCreateGenericArchive() throws NoSuchJobException,
JobExecutionAlreadyRunningException, JobRestartException,
JobInstanceAlreadyCompleteException, JobParametersInvalidException, IOException {
    Map<String, JobParameter> parameters =
            new HashMap<String, JobParameter>();
    parameters.put("specs.file", new JobParameter(specsFile.getFile().getAbsolutePath()));
    parameters.put("items.file", new JobParameter(itemsFile.getFile().getAbsolutePath()));
    parameters.put("taxon.file", new JobParameter(taxonFile.getFile().getAbsolutePath()));
    parameters.put("fields.terminated.by", new JobParameter(","));
    parameters.put("fields.enclosed.by", new JobParameter("\""));
    parameters.put("output.fields",new JobParameter("taxonID,description,language,license"));
    parameters.put("taxon.file.skip.lines", new JobParameter("1"));
    parameters.put("taxon.file.field.names", new JobParameter("taxonID,subfamily,subtribe,tribe,accessRights,bibliographicCitation,created,license,modified,references,rights,rightsHolder,acceptedNameUsage,acceptedNameUsageID,class,datasetID,datasetName,family,genus,infraspecificEpithet,kingdom,nameAccordingTo,namePublishedIn,namePublishedInID,namePublishedInYear,nomenclaturalCode,nomenclaturalStatus,order,originalNameUsage,originalNameUsageID,parentNameUsage,parentNameUsageID,phylum,scientificName,scientificNameAuthorship,scientificNameID,specificEpithet,subgenus,taxonRank,taxonRemarks,taxonomicStatus,verbatimTaxonRank"));
    parameters.put("taxon.file.delimiter", new JobParameter("\t"));
    parameters.put("taxon.file.quote.character", new JobParameter("\""));
    parameters.put("description.file.field.names", new JobParameter("taxonID,description,license,language,type,rights"));
    parameters.put("description.default.values", new JobParameter("license=http://creativecommons.org/licenses/by-nc-sa/3.0,language=EN,type=general,rights=© Copyright The Board of Trustees\\, Royal Botanic Gardens\\, Kew."));
    parameters.put("archive.file", new JobParameter(UUID.randomUUID().toString()));

    JobParameters jobParameters = new JobParameters(parameters);

    Job deltaToDwC = jobLocator.getJob("DeltaToDwC");
    assertNotNull("DeltaToDwC must not be null",  deltaToDwC);
    JobExecution jobExecution = jobLauncher.run(deltaToDwC, jobParameters);
    assertEquals("The job should complete successfully", "COMPLETED", jobExecution.getExitStatus().getExitCode());
    for (StepExecution stepExecution : jobExecution.getStepExecutions()) {
        logger.info(stepExecution.getStepName() + " "
                + stepExecution.getReadCount() + " "
                + stepExecution.getFilterCount() + " "
                + stepExecution.getWriteCount() + " " + stepExecution.getCommitCount());
    }

}
Project: powop    File: GrassbaseIntegrationTest.java
@Test
public void testCreateGenericArchive() throws NoSuchJobException,
JobExecutionAlreadyRunningException, JobRestartException,
JobInstanceAlreadyCompleteException, JobParametersInvalidException, IOException {
    Map<String, JobParameter> parameters =
            new HashMap<String, JobParameter>();
    parameters.put("species.specs.file", new JobParameter(speciesSpecsFile.getFile().getAbsolutePath()));
    parameters.put("species.items.file", new JobParameter(speciesItemsFile.getFile().getAbsolutePath()));
    parameters.put("species.taxon.file", new JobParameter(speciesTaxonFile.getFile().getAbsolutePath()));
    parameters.put("genera.specs.file", new JobParameter(generaSpecsFile.getFile().getAbsolutePath()));
    parameters.put("genera.items.file", new JobParameter(generaItemsFile.getFile().getAbsolutePath()));
    parameters.put("genera.taxon.file", new JobParameter(generaTaxonFile.getFile().getAbsolutePath()));
    parameters.put("images.file", new JobParameter(imagesFile.getFile().getAbsolutePath()));

    JobParameters jobParameters = new JobParameters(parameters);

    Job deltaToDwC = jobLocator.getJob("Grassbase");
    assertNotNull("Grassbase must not be null",  deltaToDwC);
    JobExecution jobExecution = jobLauncher.run(deltaToDwC, jobParameters);
    assertEquals("The job should complete successfully", "COMPLETED", jobExecution.getExitStatus().getExitCode());
    for (StepExecution stepExecution : jobExecution.getStepExecutions()) {
        logger.info(stepExecution.getStepName() + " "
                + stepExecution.getReadCount() + " "
                + stepExecution.getFilterCount() + " "
                + stepExecution.getWriteCount() + " " + stepExecution.getCommitCount());
    }

}
Project: powop    File: GBIFJobIntegrationTest.java
/**
 *
 * @throws IOException
 *             if a temporary file cannot be created.
 * @throws NoSuchJobException
 *             if SpeciesPageHarvestingJob cannot be located
 * @throws JobParametersInvalidException
 *             if the job parameters are invalid
 * @throws JobInstanceAlreadyCompleteException
 *             if the job has already completed
 * @throws JobRestartException
 *             if the job cannot be restarted
 * @throws JobExecutionAlreadyRunningException
 *             if the job is already running
 */
@Test
public final void testNotModifiedResponse() throws IOException,
NoSuchJobException, JobExecutionAlreadyRunningException,
JobRestartException, JobInstanceAlreadyCompleteException,
JobParametersInvalidException {
    Session session = sessionFactory.openSession();
    Transaction tx = session.beginTransaction();

    List<Taxon> taxa = session.createQuery("from Taxon as taxon").list();
    solrIndexingInterceptor.indexObjects(taxa);
    tx.commit();

    Map<String, JobParameter> parameters = new HashMap<String, JobParameter>();
    parameters.put("family", new JobParameter("Araceae"));
    parameters.put("authority.name", new JobParameter("test"));
    parameters.put("authority.uri", new JobParameter(mockHttpUrl + "/list.xml"));
    parameters.put("resource.id", new JobParameter("123"));
    parameters.put("authority.last.harvested", new JobParameter(Long.toString((GBIFJobIntegrationTest.PAST_DATETIME.getMillis()))));


    JobParameters jobParameters = new JobParameters(parameters);

    Job job = jobLocator.getJob("GBIFImport");
    assertNotNull("GBIFImport must not be null", job);
    JobExecution jobExecution = jobLauncher.run(job, jobParameters);
    if ("FAILED".equals(jobExecution.getExitStatus().getExitCode())) {
        logger.info("Errors: " + jobExecution.getFailureExceptions());
    }
    assertEquals("The job should complete successfully", "COMPLETED", jobExecution.getExitStatus().getExitCode());
    for (StepExecution stepExecution : jobExecution.getStepExecutions()) {
        logger.info(stepExecution.getStepName() + " "
                + stepExecution.getReadCount() + " "
                + stepExecution.getFilterCount() + " "
                + stepExecution.getWriteCount() + " " + stepExecution.getCommitCount());
    }
}
Project: powop    File: CalculateDerivedPropertiesJobIntegrationTest.java
/**
 *
 * @throws IOException
 *             if a temporary file cannot be created.
 * @throws NoSuchJobException
 *             if SpeciesPageHarvestingJob cannot be located
 * @throws JobParametersInvalidException
 *             if the job parameters are invalid
 * @throws JobInstanceAlreadyCompleteException
 *             if the job has already completed
 * @throws JobRestartException
 *             if the job cannot be restarted
 * @throws JobExecutionAlreadyRunningException
 *             if the job is already running
 */
@Test
public final void testNotModifiedResponse() throws IOException,
NoSuchJobException, JobExecutionAlreadyRunningException,
JobRestartException, JobInstanceAlreadyCompleteException,
JobParametersInvalidException {
    Map<String, JobParameter> parameters =
            new HashMap<String, JobParameter>();
    parameters.put("query.string", new JobParameter(
            "select t.id from Taxon t join t.parentNameUsage"));

    JobParameters jobParameters = new JobParameters(parameters);

    Job job = jobLocator
            .getJob("CalculateDerivedProperties");
    assertNotNull("CalculateDerivedProperties must not be null",
            job);
    JobExecution jobExecution = jobLauncher.run(job, jobParameters);
    assertEquals("The job should complete successfully", "COMPLETED", jobExecution.getExitStatus().getExitCode());

    for (StepExecution stepExecution : jobExecution.getStepExecutions()) {
        logger.info(stepExecution.getStepName() + " "
                + stepExecution.getReadCount() + " "
                + stepExecution.getFilterCount() + " "
                + stepExecution.getWriteCount());
    }
}
Project: WebAPI    File: JobService.java
/**
 * <i>Variation of spring-batch-admin support:
 * org.springframework.batch.admin.web.BatchJobExecutionsController</i>.
 * <p>
 * Return a paged collection of job executions. Filter for a given job.
 * Returned in pages.
 *
 * @param jobName name of the job
 * @param pageIndex start index for the job execution list
 * @param pageSize page size for the list
 * @param comprehensivePage boolean if true returns a comprehensive resultset
 * as a page (i.e. pageRequest(0,resultset.size()))
 * @return collection of JobExecutionInfo
 * @throws NoSuchJobException if the named job cannot be found
 */
@GET
@Path("/execution")
@Produces(MediaType.APPLICATION_JSON)
public Page<JobExecutionResource> list(@QueryParam("jobName") final String jobName,
        @DefaultValue("0") @QueryParam("pageIndex") final Integer pageIndex,
        @DefaultValue("20") @QueryParam("pageSize") final Integer pageSize,
        @QueryParam("comprehensivePage") boolean comprehensivePage)
        throws NoSuchJobException {

  List<JobExecutionResource> resources = null;

  if (comprehensivePage) {
    String sqlPath = "/resources/job/sql/jobExecutions.sql";
    String tqName = "ohdsi_schema";
    String tqValue = getOhdsiSchema();
    PreparedStatementRenderer psr = new PreparedStatementRenderer(null, sqlPath, tqName, tqValue);
    resources = getJdbcTemplate().query(psr.getSql(), psr.getSetter(), new ResultSetExtractor<List<JobExecutionResource>>() {
      @Override
      public List<JobExecutionResource> extractData(ResultSet rs) throws SQLException, DataAccessException {

        return JobUtils.toJobExecutionResource(rs);
      }
    });
    return new PageImpl<>(resources, new PageRequest(0, pageSize), resources.size());
  } else {
    resources = new ArrayList<>();
    for (final JobExecution jobExecution : (jobName == null ? this.jobExecutionDao.getJobExecutions(pageIndex,
            pageSize) : this.jobExecutionDao.getJobExecutions(jobName, pageIndex, pageSize))) {
      resources.add(JobUtils.toJobExecutionResource(jobExecution));
    }
    return new PageImpl<>(resources, new PageRequest(pageIndex, pageSize),
            this.jobExecutionDao.countJobExecutions());
  }

}
Project: spring-batch-tools    File: BatchController.java
@RequestMapping("/jobs")
@ResponseBody
public List<JobInfo> jobs() {
    return jobRegistry.getJobNames().stream() //
            .sorted() //
            .map(name -> {
                try {
                    return toJobInfo(getJob(name));
                } catch (final NoSuchJobException e) {
                    throw new IllegalArgumentException("Job not found: " + name, e);
                }
            }) //
            .collect(Collectors.toList());
}
Project: spring-batch-tools    File: BatchController.java
@RequestMapping("/jobs/{jobName}/instances/count")
@ResponseBody
public Map<String, ?> instancesCount(@PathVariable("jobName") final String jobName) throws NoSuchJobException {
    final Map<String, Object> map = new HashMap<String, Object>();
    map.put("count", jobExplorer.getJobInstanceCount(jobName));

    return map;
}
Project: spring-batch-tools    File: BatchController.java
@RequestMapping("/executions/{executionId}/restart")
@ResponseBody
public JobExecution restartExecution(@PathVariable("executionId") final long executionId) throws JobInstanceAlreadyCompleteException,
        NoSuchJobExecutionException, NoSuchJobException, JobRestartException, JobParametersInvalidException,
        JobExecutionAlreadyRunningException {
    final long restartedExecutionId = batchOperator.restart(executionId);

    return execution(restartedExecutionId);
}
Project: spring-batch-tools    File: BatchController.java
private Job getJob(final String jobName) throws NoSuchJobException {
    try {
        return jobRegistry.getJob(jobName);
    } catch (final NoSuchJobException e) {
        throw new NotFoundException();
    }
}
Project: spring-batch-tools    File: BatchOperatorImpl.java
/**
 * Merges the job's default parameters with the given input parameters. Input parameters override default parameters.
 *
 * @param inputParameter
 *            the input parameters, may be <code>null</code>.
 */
private JobParameters createJobParameters(final String jobName, final JobParameters inputParameter) throws NoSuchJobException,
        JobParametersNotFoundException {
    final JobParameters params = getNextJobParameters(jobName);
    return Optional.ofNullable(inputParameter).map(customParams -> {
        final JobParametersBuilder builder = new JobParametersBuilder(params);
        customParams.getParameters().forEach((key, value) -> builder.addParameter(key, value));
        return builder.toJobParameters();
    }).orElse(params);
}
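The merge above lets explicitly supplied parameters win over the incrementer-generated defaults. A hypothetical call site inside the same class (the job name and parameter key are made up for illustration):

// Hypothetical usage: the incrementer supplies defaults such as a run id, while
// "input.file" comes from the caller and overrides any default of the same name.
private JobParameters exampleMerge() throws NoSuchJobException, JobParametersNotFoundException {
    JobParameters input = new JobParametersBuilder()
            .addString("input.file", "/tmp/customers.csv")
            .toJobParameters();
    return createJobParameters("customerReportJob", input);
}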
Project: spring-batch-tools    File: BatchOperatorImpl.java
private JobParameters getNextJobParameters(final String jobName) throws NoSuchJobException, JobParametersNotFoundException {
    final Job job = jobRegistry.getJob(jobName);
    final JobParametersIncrementer incrementer = Optional.ofNullable(job.getJobParametersIncrementer()).orElseThrow(
            () -> new JobParametersNotFoundException("No job parameters incrementer found for job " + jobName));

    return getNextJobParameters(jobName, incrementer);
}