Spring Boot批处理-从计划任务运行多个批处理作业会导致并发问题

时间:2018-08-16 14:43:53

标签: java spring spring-boot spring-batch

我使用Spring Boot + Batch创建了三个不同的批处理作业。当我尝试通过计划任务几乎同时(相差几秒钟)运行这些作业时,遇到了意外的问题:FileReader开始报告解析异常。如果按顺序运行这些作业,或通过CommandLineRunner一次运行一个作业,则一切正常。每个读取器组件都配置为“步骤”(step)作用域。有人可以看看并提出可能的解决方案吗?代码如下。

Job 1 :

        @Bean(JOB_NAME)
        public Job importMonetryFilesJob(JobCompletionNotificationListener listener) {
            // Single-step job; RunIdIncrementer adds a fresh run.id parameter so the
            // job can be launched repeatedly with otherwise identical parameters.
            return jobBuilderFactory
                    .get(JOB_NAME)
                    .incrementer(new RunIdIncrementer())
                    .listener(listener)
                    .flow(step1())
                    .end()
                    .build();
        }

        @SuppressWarnings("unchecked")
        @Bean(STEP_1)
        public Step step1() {
            // Chunk size 1: each record is read, processed and written in its own chunk.
            // Skips are governed by JobSkipPolicy and surfaced via the custom skip listener.
            TaskletStep step = stepBuilderFactory.get(STEP_1)
                    .<RawMonteryFileInput, RawMonteryFileInput>chunk(1)
                    .reader(reader())
                    .processor(processor())
                    .writer(writer())
                    .faultTolerant()
                    .skipPolicy(new JobSkipPolicy())
                    .listener(monetryCustomSkipListener())
                    .build();
            // The sweep-context listener is attached after build() rather than through the builder.
            step.registerStepExecutionListener(batchSweepStepContext());
            return step;
        }

        @Bean(READER_NAME)
        @StepScope
        public MultiResourceItemReader<RawMonteryFileInput> reader() {
            // Step-scoped composite reader: iterates over every matched input resource,
            // delegating the actual line parsing to the internal flat-file reader.
            MultiResourceItemReader<RawMonteryFileInput> compositeReader = new MultiResourceItemReader<>();
            compositeReader.setResources(inputFiles);
            compositeReader.setDelegate(readerInternal());
            return compositeReader;
        }

        @Bean(READER_NAME + "_internal")
        @StepScope
        public FlatFileItemReader<RawMonteryFileInput> readerInternal() {
            // Per-file delegate reader. MergingItemSeparator decides where one logical
            // record ends before the line mapper converts it into an item.
            CustomFlatFileItemReader<RawMonteryFileInput> delegate = new CustomFlatFileItemReader<>();
            delegate.setRecordSeparatorPolicy(new MergingItemSeparator());
            delegate.setLineMapper(sourceAwareLineMapper());
            delegate.setFileLocation(processedFilesLocation);
            return delegate;
        }

        @Bean(PROCESSOR_NAME)
        @StepScope
        public ValidatingItemProcessor<RawMonteryFileInput> processor() {
            // Validates each item with the shared validator before it reaches the writer.
            SpringValidator<RawMonteryFileInput> itemValidator = new SpringValidator<>();
            itemValidator.setValidator(validator);
            return new ValidatingItemProcessor<>(itemValidator);
        }

        @Bean(WRITER_NAME)
        @StepScope
        public FlatFileItemWriter<RawMonteryFileInput> writer() {
            // Field extractor configured with the job's output column names.
            // (The original used double-brace initialization, which creates anonymous
            // subclasses that retain a hidden reference to this configuration class —
            // a well-known anti-pattern; plain setter calls are equivalent and safer.)
            CustomBeanWrapperExtractor<RawMonteryFileInput> extractor = new CustomBeanWrapperExtractor<>();
            extractor.setNames(MonetryJobConstants.WRITER_HEADER);

            // Pipe-delimited line per written item.
            CustomDelimitedLineAggregator<RawMonteryFileInput> aggregator = new CustomDelimitedLineAggregator<>();
            aggregator.setDelimiter("|");
            aggregator.setFieldExtractor(extractor);

            FlatFileItemWriter<RawMonteryFileInput> writer = new FlatFileItemWriter<>();
            writer.setResource(new FileSystemResource(getWriterFileName()));
            // Remove the output file if the step produced no items.
            writer.setShouldDeleteIfEmpty(true);
            writer.setLineAggregator(aggregator);
            return writer;
        }

    Job 2 :

        @Bean(JOB_NAME)
        public Job importAccountBalanceJob(JobCompletionNotificationListener listener) {
            // Single-step job; RunIdIncrementer guarantees a new JobInstance per launch.
            return jobBuilderFactory
                    .get(JOB_NAME)
                    .incrementer(new RunIdIncrementer())
                    .listener(listener)
                    .flow(step1())
                    .end()
                    .build();
        }

        @SuppressWarnings("unchecked")
        @Bean(STEP_1)
        public Step step1() {
            // Chunk size 1: one record per read/process/write cycle. Skips are decided
            // by JobSkipPolicy and reported through the account-balance skip listener.
            TaskletStep step = stepBuilderFactory.get(STEP_1)
                    .<AccountBalanceFileInput, AccountBalanceFileInput>chunk(1)
                    .reader(reader())
                    .processor(processor())
                    .writer(writer())
                    .faultTolerant()
                    .skipPolicy(new JobSkipPolicy())
                    .listener(accountBalanceCustomSkipListener())
                    .build();
            // Sweep-context listener is registered post-build, not via the builder chain.
            step.registerStepExecutionListener(batchSweepStepContext());
            return step;
        }

        @Bean(READER_NAME)
        @StepScope
        public MultiResourceItemReader<AccountBalanceFileInput> reader() {
            // Step-scoped composite reader over all matched input files; the internal
            // flat-file reader performs the per-file parsing.
            // (A previous Assert.notEmpty(inputFiles, ...) guard was left commented out.)
            MultiResourceItemReader<AccountBalanceFileInput> compositeReader = new MultiResourceItemReader<>();
            compositeReader.setResources(inputFiles);
            compositeReader.setDelegate(readerInternal());
            return compositeReader;
        }

        @Bean(READER_NAME + "_internal")
        @StepScope
        public FlatFileItemReader<AccountBalanceFileInput> readerInternal() {
            // Per-file delegate reader; MergingItemSeparator merges multi-line records
            // before the source-aware line mapper maps them to items.
            CustomFlatFileItemReader<AccountBalanceFileInput> delegate = new CustomFlatFileItemReader<>();
            delegate.setRecordSeparatorPolicy(new MergingItemSeparator());
            delegate.setLineMapper(sourceAwareLineMapper());
            delegate.setFileLocation(processedFilesLocation);
            return delegate;
        }

        @Bean(PROCESSOR_NAME)
        @StepScope
        public ValidatingItemProcessor<AccountBalanceFileInput> processor() {
            // Validates each parsed item with the shared validator before writing.
            SpringValidator<AccountBalanceFileInput> itemValidator = new SpringValidator<>();
            itemValidator.setValidator(validator);
            return new ValidatingItemProcessor<>(itemValidator);
        }

        @Bean(WRITER_NAME)
        @StepScope
        public FlatFileItemWriter<AccountBalanceFileInput> writer() {
            // Field extractor configured with the account-balance output columns.
            // (The original used double-brace initialization — anonymous subclasses that
            // capture a reference to the enclosing configuration class. Plain setter
            // calls are behaviorally equivalent and avoid that anti-pattern.)
            CustomBeanWrapperExtractor<AccountBalanceFileInput> extractor = new CustomBeanWrapperExtractor<>();
            extractor.setNames(AccountBalanceJobConstants.WRITER_HEADER);

            // Pipe-delimited line per written item.
            CustomDelimitedLineAggregator<AccountBalanceFileInput> aggregator = new CustomDelimitedLineAggregator<>();
            aggregator.setDelimiter("|");
            aggregator.setFieldExtractor(extractor);

            FlatFileItemWriter<AccountBalanceFileInput> writer = new FlatFileItemWriter<>();
            writer.setResource(new FileSystemResource(getWriterFileName()));
            // Remove the output file if the step produced no items.
            writer.setShouldDeleteIfEmpty(true);
            writer.setLineAggregator(aggregator);
            return writer;
        }

    Config : 

        @Configuration
    @EnableAutoConfiguration
    @EnableBatchProcessing(modular = true)
    public class BatchJobsConfig {

        @Bean
        public ResourcelessTransactionManager transactionManager() {
            // The Map-based job repository has no transactional resource behind it,
            // so a no-op transaction manager is used.
            ResourcelessTransactionManager txManager = new ResourcelessTransactionManager();
            return txManager;
        }

        // Builds the in-memory (Map-based) job repository factory.
        // NOTE(review): the Map-based repository is intended for testing/prototyping,
        // not concurrent production use — it is a likely suspect for the failures seen
        // when several jobs run simultaneously; consider a JDBC-backed JobRepository. Verify.
        @Bean
        public MapJobRepositoryFactoryBean mapJobRepositoryFactory(ResourcelessTransactionManager transactionManager)
                throws Exception {
            MapJobRepositoryFactoryBean factory = new MapJobRepositoryFactoryBean(transactionManager);
            // Presumably called manually so the DAOs are initialized before jobExplorer()
            // reads them off the factory — confirm; Spring normally invokes this callback itself.
            factory.afterPropertiesSet();
            return factory;
        }

        @Bean
        public JobRepository jobRepository(MapJobRepositoryFactoryBean repositoryFactory) throws Exception {
            // Expose the repository produced by the Map-based factory as its own bean.
            JobRepository repository = repositoryFactory.getObject();
            return repository;
        }

        @Bean
        public JobExplorer jobExplorer(MapJobRepositoryFactoryBean repositoryFactory) {
            // Read-only explorer backed by the same in-memory DAOs the repository writes to.
            return new SimpleJobExplorer(
                    repositoryFactory.getJobInstanceDao(),
                    repositoryFactory.getJobExecutionDao(),
                    repositoryFactory.getStepExecutionDao(),
                    repositoryFactory.getExecutionContextDao());
        }

        // Launcher used by the @Scheduled tasks to start jobs.
        // NOTE(review): no TaskExecutor is configured, so SimpleJobLauncher defaults to
        // a synchronous executor — run() blocks the scheduler thread until the job
        // finishes. For concurrent scheduled launches consider an async TaskExecutor;
        // confirm against the Spring Batch documentation.
        @Bean
        public SimpleJobLauncher jobLauncher(JobRepository jobRepository) {
            SimpleJobLauncher launcher = new SimpleJobLauncher();
            launcher.setJobRepository(jobRepository);
            return launcher;
        }

    Schedule task looks like this :

        @Slf4j
    @Component
    @EnableScheduling
    public class AccountBalanceJobSchedular {

        @Autowired
        private SimpleJobLauncher jobLauncher;

        // The account-balance load job, selected by qualifier since multiple Job beans exist.
        @Autowired
        @Qualifier(AccountBalanceFileLoadJob.JOB_ACCOUNT_BAL_LOAD)
        private Job batchJob;

        /**
         * Launches the account-balance load job every 6 seconds.
         *
         * A unique "JobID" parameter (nanoTime concatenated with a random UUID) is added
         * on every run so each launch creates a new JobInstance.
         *
         * NOTE(review): with the default synchronous launcher, run() blocks this
         * scheduler thread until the job completes — verify overlap behaviour is intended.
         *
         * @throws Exception if the launcher fails to start the job
         */
        @Scheduled(cron = "*/6 * * * * *")
        public void perform() throws Exception {
            // long + String concatenation already yields a String; the original
            // String.valueOf(...) wrapper was redundant and has been removed.
            JobParameters param = new JobParametersBuilder()
                    .addString("JobID", System.nanoTime() + UUID.randomUUID().toString())
                    .toJobParameters();

            JobExecution execution = jobLauncher.run(batchJob, param);

            // Parameterized SLF4J logging instead of string concatenation.
            log.info("Job finished with status :{}", execution.getStatus());
        }
    }

每个作业都有其自己的名称和自己的类文件,因此每个bean的名称会根据作业名称而有所不同。如果作业未同时运行,则可以正常工作。任何帮助将不胜感激。

我正在使用Spring Boot 2,Spring Batch 4和Java 10。

0 个答案:

没有答案