How to use Spring transaction support in Spring Batch

Asked: 2019-06-03 07:24:36

Tags: spring-boot spring-batch spring-transactions

I am trying to use Spring Batch to read a .dat file and persist the data into a database. My requirement is that either all of the data is inserted into the table or none of it, i.e. atomicity. However, with Spring Batch I am not able to achieve this: it reads and inserts the data chunk by chunk. If at some point a record is bad and a database exception is thrown, I want a complete rollback, but that does not happen. Say the failure occurs at record 2051; my code has then already saved 2050 records, but I want a full rollback, and if all the data is good then all N records should be persisted. Thanks in advance for any help or pointers to an approach that could solve my problem.

Note: I have already used Spring's @Transactional annotation on the calling method, but it does not work, and I am reading the data in chunks of 10 items.
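For context: in a chunk-oriented step, each chunk is written in its own transaction, so with chunk(10) every ten items are committed independently and a later failure only rolls back the chunk in flight; a @Transactional annotation on the calling method does not wrap those inner transactions. Below is a minimal sketch of where that per-chunk transaction is configured, assuming the default Spring Batch 4 builders; the explicit PlatformTransactionManager parameter is an assumption and is not part of the original configuration shown further down.

    @Bean(name = "MyStep")
    public Step step1(PlatformTransactionManager transactionManager) throws IOException
    {
        return stepBuilderFactory.get("MyStep")
                // one transaction is opened and committed per chunk of 10 items;
                // chunks that already committed are not undone by a later failure
                .<InputMapperDTO, FinalDTO>chunk(10)
                .reader(this.reader(null))
                .processor(this.processor())
                .writer(this.writer())
                .transactionManager(transactionManager) // governs the per-chunk commits
                .build();
    }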

MyConfiguration.java

@Configuration
public class MyConfiguration 
{

    @Autowired
    JobBuilderFactory jobBuilderFactory;

    @Autowired
    StepBuilderFactory stepBuilderFactory;

    @Autowired
    @Qualifier("MyCompletionListener")
    JobCompletionNotificationListener jobCompletionNotificationListener;

    @StepScope
    @Bean(name="MyReader")
    public FlatFileItemReader<InputMapperDTO> reader(@Value("#{jobParameters['fileName']}") String fileName) throws IOException 
    {
        FlatFileItemReader<InputMapperDTO> newBean = new FlatFileItemReader<>();
        newBean.setName("MyReader");
        newBean.setResource(new InputStreamResource(FileUtils.openInputStream(new File(fileName))));
        newBean.setLineMapper(lineMapper());
        newBean.setLinesToSkip(1);
        return newBean;
    }

    @Bean(name="MyLineMapper")
    public DefaultLineMapper<InputMapperDTO> lineMapper() 
    {
        DefaultLineMapper<InputMapperDTO> lineMapper = new DefaultLineMapper<>();
        lineMapper.setLineTokenizer(lineTokenizer());
        Reader reader = new Reader();
        lineMapper.setFieldSetMapper(reader);
        return lineMapper;
    }

    @Bean(name="MyTokenizer")
    public DelimitedLineTokenizer lineTokenizer() 
    {
        DelimitedLineTokenizer tokenizer = new DelimitedLineTokenizer();
        tokenizer.setDelimiter("|");
        tokenizer.setNames("InvestmentAccountUniqueIdentifier", "BaseCurrencyUniqueIdentifier",
                "OperatingCurrencyUniqueIdentifier", "PricingHierarchyUniqueIdentifier", "InvestmentAccountNumber",
                "DummyAccountIndicator", "InvestmentAdvisorCompanyNumberLegacy","HighNetWorthAccountTypeCode");
        tokenizer.setIncludedFields(0, 5, 7, 13, 29, 40, 49, 75);
        return tokenizer;
    }

    @Bean(name="MyBatchProcessor")
    public ItemProcessor<InputMapperDTO, FinalDTO> processor() 
    {
        return new Processor();
    }

    @Bean(name="MyWriter")
    public ItemWriter<FinalDTO> writer() 
    {
        return new Writer();
    }

    @Bean(name="MyStep")
    public Step step1() throws IOException 
    {
        return stepBuilderFactory.get("MyStep")
                .<InputMapperDTO, FinalDTO>chunk(10)
                .reader(this.reader(null))
                .processor(this.processor())
                .writer(this.writer())
                .build();
    }

    @Bean(name="MyJob")
    public Job importUserJob(@Autowired @Qualifier("MyStep") Step step1) 
    {
        return jobBuilderFactory
                .get("MyJob"+new Date())
                .incrementer(new RunIdIncrementer())
                .listener(jobCompletionNotificationListener)
                .flow(step1)
                .end()
                .build();
    }

}

Writer.java

public class Writer implements ItemWriter<FinalDTO>
{

    @Autowired
    SomeRepository someRepository;

    @Override
    public void write(List<? extends FinalDTO> listOfObjects) throws Exception 
    {
        someRepository.saveAll(listOfObjects);      
    }

}

JobCompletionNotificationListener.java

public class JobCompletionNotificationListener extends JobExecutionListenerSupport
{

    @Override
    public void afterJob(JobExecution jobExecution) 
    {
        if(jobExecution.getStatus() == BatchStatus.COMPLETED) 
        {
            System.err.println("****************************************");
            System.err.println("*****    Batch Job Completed      ******");
            System.err.println("****************************************");
        }
        else
        {
            System.err.println("****************************************");
            System.err.println("*****    Batch Job Failed      ******");
            System.err.println("****************************************");
        }
    }

}

MyCallerMethod

    @Transactional
    public String processFile(String datFile) throws JobExecutionAlreadyRunningException, JobRestartException,
            JobInstanceAlreadyCompleteException, JobParametersInvalidException 
    {
        long st = System.currentTimeMillis();

        JobParametersBuilder builder = new JobParametersBuilder();
        builder.addString("fileName",datFile);
        builder.addDate("date", new Date());
        jobLauncher.run(job, builder.toJobParameters());

        System.err.println("****************************************");
        System.err.println("*****    Total time consumed = "+(System.currentTimeMillis()-st)+"      ******");
        System.err.println("****************************************");
        return response;
    }

1 Answer:

Answer 0 (score: 0)

What I was trying to do is not provided by Spring Batch out of the box. For my requirement, I implemented a custom delete that cleans up the database whenever any step fails.
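A compensating cleanup like the one described above could be hooked into a job listener, mirroring the existing JobCompletionNotificationListener: if the job did not complete, delete whatever the failed run already committed. This is only a sketch; the deleteByBatchRunId method and the idea of tagging each inserted row with the run id (run.id is supplied by the RunIdIncrementer in the configuration) are assumptions, not something shown in the question or the answer.

public class CleanupOnFailureListener extends JobExecutionListenerSupport
{

    @Autowired
    SomeRepository someRepository;

    @Override
    public void afterJob(JobExecution jobExecution)
    {
        if(jobExecution.getStatus() != BatchStatus.COMPLETED)
        {
            // Hypothetical cleanup: assumes every row written by the Writer was
            // tagged with the run id of this execution so it can be deleted again.
            Long runId = jobExecution.getJobParameters().getLong("run.id");
            someRepository.deleteByBatchRunId(runId);
        }
    }

}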
