
I recently started writing a Spring Batch application using the Java-based configuration approach, with the Spring Batch and Spring Boot starter packages. I use a partitioned step with a task executor to do the work. The problem I'm facing is that once the job completes, the batch process does not stop; it keeps running in Eclipse and on my Linux box, and I have to find and kill it manually. Can you help? The job works fine when I do not partition the step and run it single-threaded. In short: a Spring Boot / Spring Batch partitioned job does not stop after completion.

My job configuration:

@Bean 
    @StepScope 
    public ItemReader<MediaAsset> metaDataExportReader(@Value("#{jobParameters[sourceSystemCode]}") String sourceSystemCode,@Value("#{jobParameters[assetType]}") String assetType,@Value("#{stepExecutionContext[startingMediaAssetId]}") long startingMediaAssetId, 
      @Value("#{stepExecutionContext[endingMediaAssetId]}") long endingMediaAssetId,@Value("#{stepExecutionContext[threadName]}") String threadName) throws Exception { 
     logger.debug("Reader is called...."+sourceSystemCode); 
     logger.debug("page size---------->"+jobConfig.getPageOrChunkSizeMetaDataExport()); 
     logger.debug("startingMediaAssetId---------->"+startingMediaAssetId); 
     logger.debug("endingMediaAssetId"+endingMediaAssetId); 
     logger.debug("threadName"+threadName); 
     final Map<String,Object> parameters = new HashMap<>(); 
     parameters.put("startingMediaAssetId",startingMediaAssetId); 
     parameters.put("endingMediaAssetId",endingMediaAssetId); 
     JdbcPagingItemReader<MediaAsset> jdbcPagingItemReader = getJdbcPagingItemReader(sourceSystemCode, assetType); 
     jdbcPagingItemReader.setParameterValues(parameters); 
     return jdbcPagingItemReader; 
    } 

    @Bean(destroyMethod="close") 
    @StepScope 
    public ItemWriter<MediaAsset> metaDataExportWriter(@Value("#{jobParameters[sourceSystemCode]}") String sourceSystemCode,@Value("#{jobParameters[assetType]}") String assetType,@Value("#{stepExecutionContext[startingMediaAssetId]}") long startingMediaAssetId, 
      @Value("#{stepExecutionContext[endingMediaAssetId]}") long endingMediaAssetId,@Value("#{stepExecutionContext[threadName]}") String threadName) throws Exception { 
     logger.debug("Coming here Item Writer,..."+threadName); 
     logger.debug("getItemsPerFile---------->"+jobConfig.getPageOrChunkSizeMetaDataExport()); 
     //for xml file creation 
     StaxEventItemWriter<MediaAsset> staxEventItemWriter = new StaxEventItemWriter<>(); 
     staxEventItemWriter.setRootTagName(DL3ConstantUtil.EXPORT_ASSET_METADATA_BY_SOURCESYSTEM_CODE_ROOT_TAG); 
     staxEventItemWriter.setMarshaller(marshaller); 
     staxEventItemWriter.setOverwriteOutput(true); 
     //for splitting the files into multiple files based on record size 
     MultiResourceItemWriter<MediaAsset> multiResourceItemWriter = new MultiResourceItemWriter<>(); 
     multiResourceItemWriter.setItemCountLimitPerResource(jobConfig.getPageOrChunkSizeMetaDataExport()); 
     multiResourceItemWriter.setDelegate(staxEventItemWriter); 
     multiResourceItemWriter.setResourceSuffixCreator(new ResourceSuffixCreator() { 
      @Override 
      public String getSuffix(int index) { 
       return DL3ConstantUtil.UNDERSCORE+threadName+DL3ConstantUtil.UNDERSCORE+startingMediaAssetId+DL3ConstantUtil.UNDERSCORE+endingMediaAssetId+DL3ConstantUtil.UNDERSCORE+index+DL3ConstantUtil.EXPORT_ASSET_METADATA_BY_SOURCESYSTEM_CODE_FILE_NAME_SUFFIX; 
      } 
     }); 
     logger.debug("writer sourceSystemCode"+sourceSystemCode); 
     switch (assetType) { 
     case DL3ConstantUtil.IMAGE_ASSET: 
      switch (sourceSystemCode) { 
      case DL3ConstantUtil.LIGHTBOX: 
       multiResourceItemWriter.setResource(new FileSystemResource(jobConfig.getTargetFileLocation()+jobConfig.getBackSlash()+"IA"+jobConfig.getBackSlash()+"DPL"+jobConfig.getBackSlash()+DL3ConstantUtil.EXPORT_ASSET_METADATA_BY_SOURCESYSTEM_CODE_LIGHT_BOX_FILE_NAME_PREFIX_NAME_IMG)); 
       break; 
      case DL3ConstantUtil.SOLAR: 
       multiResourceItemWriter.setResource(new FileSystemResource(jobConfig.getTargetFileLocation()+jobConfig.getBackSlash()+"IA"+jobConfig.getBackSlash()+"SOLAR"+jobConfig.getBackSlash()+DL3ConstantUtil.EXPORT_ASSET_METADATA_BY_SOURCESYSTEM_CODE_SOLAR_BOX_FILE_NAME_PREFIX_NAME_IMG)); 
       break; 
      case DL3ConstantUtil.MANUAL_UPLOAD: 
       multiResourceItemWriter.setResource(new FileSystemResource(jobConfig.getTargetFileLocation()+jobConfig.getBackSlash()+"IA"+jobConfig.getBackSlash()+"DDDS"+jobConfig.getBackSlash()+DL3ConstantUtil.EXPORT_ASSET_METADATA_BY_SOURCESYSTEM_CODE_DDDS_BOX_FILE_NAME_PREFIX_NAME_IMG)); 
       break; 
      default: 
       break; 
      } 
     break; 
     case DL3ConstantUtil.DOCUMENT_ASSET: 
      switch (sourceSystemCode) { 
      case DL3ConstantUtil.SOLAR: 
       multiResourceItemWriter.setResource(new FileSystemResource(jobConfig.getTargetFileLocation()+jobConfig.getBackSlash()+"DA"+jobConfig.getBackSlash()+"SOLAR"+jobConfig.getBackSlash()+DL3ConstantUtil.EXPORT_ASSET_METADATA_BY_SOURCESYSTEM_CODE_SOLAR_BOX_FILE_NAME_PREFIX_NAME_DOC)); 
       break; 
      case DL3ConstantUtil.MANUAL_UPLOAD: 
       multiResourceItemWriter.setResource(new FileSystemResource(jobConfig.getTargetFileLocation()+jobConfig.getBackSlash()+"DA"+jobConfig.getBackSlash()+"DDDS"+jobConfig.getBackSlash()+DL3ConstantUtil.EXPORT_ASSET_METADATA_BY_SOURCESYSTEM_CODE_DDDS_BOX_FILE_NAME_PREFIX_NAME_DOC)); 
       break; 
      default: 
       break; 
      } 
      break; 
     default: 
      throw new Exception("no matching assetType "); 
     } 
     return multiResourceItemWriter; 
    } 


    @Bean(name="GenerateXMLFilesMaster") 
    public Step generateXMLFilesMaster(ItemReader<MediaAsset> metaDataExportReader,ItemWriter<MediaAsset> metaDataExportWriter) { 
     logger.debug("Master Step initialization..."); 
     return stepBuilderFactory.get("GenerateXMLFilesMaster"). 
       partitioner(generateXMLFilesSlave(metaDataExportReader,metaDataExportWriter)). 
       partitioner("GenerateXMLFilesSlave",metaDataExportPartioner(null,null,null)). 
       partitionHandler(metaDataExportPartionHandler(metaDataExportReader,metaDataExportWriter)). 
       build(); 

    } 

    @Bean(name="GenerateXMLFilesSlave") 
    public Step generateXMLFilesSlave(ItemReader<MediaAsset> metaDataExportReader,ItemWriter<MediaAsset> metaDataExportWriter) { 
     return stepBuilderFactory.get("GenerateXMLFilesSlave") 
       .<MediaAsset, MediaAsset> chunk(jobConfig.getPageOrChunkSizeMetaDataExport()) 
       .reader(metaDataExportReader) 
       .writer(metaDataExportWriter) 
       .build(); 
    } 

    @Bean(name="uploadTaskletMetaData") 
    @StepScope 
    public Tasklet uploadTaskletMetaData(@Value("#{jobParameters[sourceSystemCode]}") String sourceSystemCode,@Value("#{jobParameters[assetType]}") String assetType){ 
     MetaDataUploadTasklet metaDataUploadTasklet = new MetaDataUploadTasklet(); 
     logger.debug("sourceSystemCode----->"+sourceSystemCode); 
     logger.debug("assetType----->"+assetType); 
     metaDataUploadTasklet.setTargetFolder(jobConfig.getTargetMetaDataRootPath()); 
     switch (assetType) { 
     case DL3ConstantUtil.IMAGE_ASSET: 
      switch (sourceSystemCode) { 
      case DL3ConstantUtil.LIGHTBOX: 
       metaDataUploadTasklet.setSourceDirectory(jobConfig.getTargetFileLocation()+jobConfig.getBackSlash()+"IA"+jobConfig.getBackSlash()+"DPL"+jobConfig.getBackSlash()); 
       //metaDataUploadTasklet.setTargetFolder(jobConfig.getTargetMetaDataRootPath()+"/IA/DPL"); 
       break; 
      case DL3ConstantUtil.SOLAR: 
       metaDataUploadTasklet.setSourceDirectory(jobConfig.getTargetFileLocation()+jobConfig.getBackSlash()+"IA"+jobConfig.getBackSlash()+"SOLAR"+jobConfig.getBackSlash()); 
       //metaDataUploadTasklet.setTargetFolder(jobConfig.getTargetMetaDataRootPath()+"/IA/SOLAR"); 
       break; 
      case DL3ConstantUtil.MANUAL_UPLOAD: 
       metaDataUploadTasklet.setSourceDirectory(jobConfig.getTargetFileLocation()+jobConfig.getBackSlash()+"IA"+jobConfig.getBackSlash()+"DDDS"+jobConfig.getBackSlash()); 
       //metaDataUploadTasklet.setTargetFolder(jobConfig.getTargetMetaDataRootPath()+"/IA/DDDS"); 
       break; 
      default: 
       break; 
      } 
      break; 
     case DL3ConstantUtil.DOCUMENT_ASSET: 
      switch (sourceSystemCode) { 
      case DL3ConstantUtil.SOLAR: 
       metaDataUploadTasklet.setSourceDirectory(jobConfig.getTargetFileLocation()+jobConfig.getBackSlash()+"DA"+jobConfig.getBackSlash()+"SOLAR"+jobConfig.getBackSlash()); 
       //metaDataUploadTasklet.setTargetFolder(jobConfig.getTargetMetaDataRootPath()+"/DA/SOLAR"); 
       break; 
      case DL3ConstantUtil.MANUAL_UPLOAD: 
       metaDataUploadTasklet.setSourceDirectory(jobConfig.getTargetFileLocation()+jobConfig.getBackSlash()+"DA"+jobConfig.getBackSlash()+"DDDS"+jobConfig.getBackSlash()); 
       //metaDataUploadTasklet.setTargetFolder(jobConfig.getTargetMetaDataRootPath()+"/DA/DDDS"); 
       break; 
      default: 
       break; 
      } 
      break; 
     default: 
      break; 
     } 


     return metaDataUploadTasklet; 
    } 

    @Bean(name="UploadXMLFiles") 
    public Step uploadXMLFiles(){ 
     return stepBuilderFactory.get("UploadXMLFiles").tasklet(uploadTaskletMetaData(null,null)).build(); 
    } 

    @Bean 
    @StepScope 
    public Partitioner metaDataExportPartioner(@Value("#{jobParameters[sourceSystemCode]}") String sourceSystemCode,@Value("#{jobParameters[assetType]}") String assetType,@Value("#{jobExecutionContext[totalCount]}") String totalCount){ 
     logger.debug("source system code--->"+sourceSystemCode); 
     logger.debug("assetType--->"+assetType); 
     MetaDataExportPartioner metaDataExportPartioner = new MetaDataExportPartioner(); 
     metaDataExportPartioner.setSourceSystemCode(sourceSystemCode); 
     metaDataExportPartioner.setAssetType(assetType); 
     logger.debug("In the partioner initiliazation------>"+totalCount); 
     metaDataExportPartioner.setTotalCount(StringUtils.isEmpty(totalCount)?0:Integer.parseInt(totalCount)); 
     return metaDataExportPartioner; 
    } 

    @Bean 
    public PartitionHandler metaDataExportPartionHandler(ItemReader<MediaAsset> reader,ItemWriter<MediaAsset> writer){ 
     logger.debug("Initializing partionHandler------>"); 
     TaskExecutorPartitionHandler partitionHandler = new TaskExecutorPartitionHandler(); 
     partitionHandler.setStep(generateXMLFilesSlave(reader,writer)); 
     partitionHandler.setGridSize(6); 
     partitionHandler.setTaskExecutor(taskExecutor()); 
     return partitionHandler; 
    } 

    @Bean 
    public TaskExecutor taskExecutor() { 
     ThreadPoolTaskExecutor taskExecutor = new ThreadPoolTaskExecutor(); 
     taskExecutor.setMaxPoolSize(10); 
     taskExecutor.setCorePoolSize(10); 
     taskExecutor.afterPropertiesSet(); 
     return taskExecutor; 
    } 

    @Bean() 
    public JobExecutionListener metaDataExportJobExecutionListener(){ 
     JobExecutionListener jobExecutionListener = new MetaDataExportJobListener(); 
     return jobExecutionListener; 
    } 

    @Bean 
    public Job exportMetaDataJob(JobExecutionListener metaDataExportJobExecutionListener) throws Exception { 
     return jobBuilderFactory.get("ExportMetaDataJob") 
       .incrementer(new RunIdIncrementer()) 
       .listener(metaDataExportJobExecutionListener) 
       .flow(generateXMLFilesMaster(metaDataExportReader(null,null,0L,0L,null),metaDataExportWriter(null,null,0L,0L,null))) 
       //.next(uploadXMLFiles()) 
       .end() 
       .build(); 
    } 

My POM file entries:

<parent> 
     <groupId>org.springframework.boot</groupId> 
     <artifactId>spring-boot-starter-parent</artifactId> 
     <version>1.3.2.RELEASE</version> 
     <relativePath /> <!-- lookup parent from repository --> 
    </parent> 

    <properties> 
     <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> 
     <java.version>1.8</java.version> 
     <spring-cloud-version>1.0.4.RELEASE</spring-cloud-version> 
     <spring-batch-admin.version>1.3.0.RELEASE</spring-batch-admin.version> 
    </properties> 

    <dependencies> 

     <!-- <dependency> <groupId>org.springframework.boot</groupId> <artifactId>spring-boot-starter-web</artifactId> 
      </dependency> --> 

     <dependency> 
      <groupId>org.springframework.boot</groupId> 
      <artifactId>spring-boot-starter-batch</artifactId> 
     </dependency> 

     <dependency> 
      <groupId>org.springframework.boot</groupId> 
      <artifactId>spring-boot-starter-mail</artifactId> 
     </dependency> 

     <dependency> 
      <groupId>org.springframework.boot</groupId> 
      <artifactId>spring-boot-starter-thymeleaf</artifactId> 
      <exclusions> 
       <exclusion> 
        <groupId>org.springframework.boot</groupId> 
        <artifactId>spring-boot-starter-web</artifactId> 
       </exclusion> 
      </exclusions> 
     </dependency> 

     <!-- <dependency> <groupId>org.springframework.batch</groupId> <artifactId>spring-batch-admin-manager</artifactId> 
      <version>${spring-batch-admin.version}</version> <exclusions> <exclusion> 
      <artifactId>slf4j-log4j12</artifactId> <groupId>org.slf4j</groupId> </exclusion> 
      <exclusion> <artifactId>slf4j-api</artifactId> <groupId>org.slf4j</groupId> 
      </exclusion> </exclusions> </dependency> --> 

     <dependency> 
      <groupId>org.springframework.cloud</groupId> 
      <artifactId>spring-cloud-aws-context</artifactId> 
      <version>${spring-cloud-version}</version> 
     </dependency> 

     <dependency> 
      <groupId>com.microsoft.sqlserver</groupId> 
      <artifactId>sqljdbc4</artifactId> 
      <version>4.0</version> 
     </dependency> 

     <dependency> 
      <groupId>com.oracle</groupId> 
      <artifactId>ojdbc14</artifactId> 
      <version>10.2.0.3.0</version> 
     </dependency> 

     <dependency> 
      <groupId>org.springframework</groupId> 
      <artifactId>spring-oxm</artifactId> 
     </dependency> 

     <dependency> 
      <groupId>org.springframework.boot</groupId> 
      <artifactId>spring-boot-starter-test</artifactId> 
      <scope>test</scope> 
     </dependency> 

     <dependency> 
      <groupId>org.springframework.boot</groupId> 
      <artifactId>spring-boot-devtools</artifactId> 
     </dependency> 

     <dependency> 
      <groupId>commons-io</groupId> 
      <artifactId>commons-io</artifactId> 
      <version>2.3</version> 
     </dependency> 

     <!-- <dependency> 
      <groupId>org.apache.commons</groupId> 
      <artifactId>commons-dbcp2</artifactId> 
      <version>2.0.1</version> 
     </dependency> --> 

     <!-- <dependency> <groupId>com.sun.xml.bind</groupId> <artifactId>jaxb-impl</artifactId> 
      <version>2.0.1</version> </dependency> --> 

    </dependencies> 

    <build> 
     <plugins> 
      <plugin> 
       <groupId>org.springframework.boot</groupId> 
       <artifactId>spring-boot-maven-plugin</artifactId> 
      </plugin> 
     </plugins> 
    </build> 

Answer


The JVM shuts down automatically once there are zero non-daemon threads left running. In the non-partitioned case, no non-daemon threads remain when the job completes, so the JVM exits. In your partitioned use case, however, something must still be waiting and keeping the application from shutting down. Taking a thread dump will help diagnose the issue, but I'd bet the threads held by the ThreadPoolTaskExecutor are the problem. If that is the case, you may want to look at an option that does not create a thread pool whose threads keep the JVM from shutting down.
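As an illustration of that suggestion, here is a minimal sketch (my own, not from the original answer) that swaps the question's taskExecutor() bean for a SimpleAsyncTaskExecutor, which starts a fresh thread per partition and keeps no idle pool threads around afterwards. Alternatively, the existing ThreadPoolTaskExecutor could be kept and configured with setDaemon(true) so its worker threads cannot hold the JVM open.

    import org.springframework.context.annotation.Bean;
    import org.springframework.core.task.SimpleAsyncTaskExecutor;
    import org.springframework.core.task.TaskExecutor;

    @Bean
    public TaskExecutor taskExecutor() {
     // SimpleAsyncTaskExecutor starts a new thread for each partition and
     // keeps no idle worker threads alive afterwards, so nothing is left
     // running to block JVM shutdown once the job completes.
     return new SimpleAsyncTaskExecutor("partition-");
    }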


Hi Michael, glad to see your reply. I have watched your videos and learned the Spring Batch concepts from them. By the way, I took a jstack thread dump and found several threads waiting on something, but I couldn't work out what they were waiting for. To fix it, I shut down the ThreadPoolTaskExecutor in the JobExecutionListener's afterJob method once the job completes, and that resolved the problem. There were also some threads running/waiting because I had added the spring-boot-starter-thymeleaf dependency, which by default pulls in the spring-web and Tomcat dependencies, so I excluded those from thymeleaf. Is this a correct approach? –
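For reference, a minimal sketch of the approach described in this comment; the listener class name comes from the question's configuration, but the exact wiring shown here is an assumption rather than the poster's actual code.

    import org.springframework.batch.core.JobExecution;
    import org.springframework.batch.core.JobExecutionListener;
    import org.springframework.beans.factory.annotation.Autowired;
    import org.springframework.core.task.TaskExecutor;
    import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;

    public class MetaDataExportJobListener implements JobExecutionListener {

     @Autowired
     private TaskExecutor taskExecutor;

     @Override
     public void beforeJob(JobExecution jobExecution) {
      // nothing to do before the job starts
     }

     @Override
     public void afterJob(JobExecution jobExecution) {
      // Release the pool's worker threads once the job has finished, so
      // that no non-daemon threads are left to keep the JVM alive.
      if (taskExecutor instanceof ThreadPoolTaskExecutor) {
       ((ThreadPoolTaskExecutor) taskExecutor).shutdown();
      }
     }
    }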