spring, spring-batch

Spring Batch MultiResourceItemWriter doesn't correctly write data to files


This is my Spring Batch Maven dependency:

    <!-- Spring Batch core dependency. The behaviour described in this question
         was observed on 2.2.0.RELEASE. -->
    <dependency>
        <groupId>org.springframework.batch</groupId>
        <artifactId>spring-batch-core</artifactId>
        <version>2.2.0.RELEASE</version>
    </dependency>

Below is my job.xml file

<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
    xmlns:batch="http://www.springframework.org/schema/batch" 
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://www.springframework.org/schema/batch
        http://www.springframework.org/schema/batch/spring-batch-2.2.xsd
        http://www.springframework.org/schema/beans 
        http://www.springframework.org/schema/beans/spring-beans-3.2.xsd">

    <import resource="../config/launch-context.xml" />

    <!-- Input flat file (10 CSV rows per the question text). Step scope defers
         creation of the resource until the step actually executes. -->
    <bean id="inputFileForMultiResource" class="org.springframework.core.io.FileSystemResource" scope="step">
        <constructor-arg value="src/main/resources/files/customerInputValidation.txt"/>
    </bean>

    <!-- Output resource injected into the StaxEventItemWriter delegate below.
         NOTE(review): MultiResourceItemWriter is expected to set the delegate's
         resource itself for each new file, so this setting is presumably
         redundant at runtime - verify against the Spring Batch javadoc. -->
    <bean id="outputFileForMultiResource" class="org.springframework.core.io.FileSystemResource" scope="step">
        <constructor-arg value="src/main/resources/files/xml/customerOutput.xml"/>
    </bean>

    <!-- Reads the comma-delimited input file; each row is tokenized into the
         named fields and mapped onto a fresh-looking Customer5TO via
         BeanWrapperFieldSetMapper. NOTE(review): prototypeBeanName requires the
         referenced bean ("customer5TO") to be prototype-scoped - if it is a
         singleton, every row of a chunk overwrites the same instance. -->
    <bean id="readerForMultiResource" class="org.springframework.batch.item.file.FlatFileItemReader">
        <property name="resource" ref="inputFileForMultiResource" />
        <property name="lineMapper">
            <bean class="org.springframework.batch.item.file.mapping.DefaultLineMapper">
            <!-- Splits each line on "," into the seven named columns. -->
            <property name="lineTokenizer">
                <bean class="org.springframework.batch.item.file.transform.DelimitedLineTokenizer">
                    <property name="names" value="firstName,middleInitial,lastName,address,city,state,zip" />
                    <property name="delimiter" value="," />
                </bean>
            </property>
            <!-- Binds the tokenized fields onto a bean obtained by name from the context. -->
            <property name="fieldSetMapper">
                <bean class="org.springframework.batch.item.file.mapping.BeanWrapperFieldSetMapper">
                    <property name="prototypeBeanName" value="customer5TO" />
                </bean>
            </property>
            </bean>
        </property>
    </bean>
    <bean id="customer5TO" class="net.gsd.group.spring.batch.tutorial.to.Customer5TO"></bean>

    <!-- Delegate writer: serializes each item as XML via XStream under a
         <customers> root element. Used as the delegate of the
         MultiResourceItemWriter below. -->
    <bean id="xmlOutputWriter" class="org.springframework.batch.item.xml.StaxEventItemWriter">
        <property name="resource" ref="outputFileForMultiResource" />
        <property name="marshaller" ref="customerMarshallerJMSJob" />
        <property name="rootTagName" value="customers" />
    </bean>
    <!-- XStream marshaller: writes each Customer5TO as a <customer> element
         instead of its fully-qualified class name. -->
    <bean id="customerMarshallerJMSJob" class="org.springframework.oxm.xstream.XStreamMarshaller">
        <property name="aliases">
            <map>
                <entry key="customer" value="net.gsd.group.spring.batch.tutorial.to.Customer5TO"></entry>
            </map>
        </property>
    </bean>
    <!-- Rolls over to a new output file every 3 items, delegating the actual
         writing to the StaxEventItemWriter above; file names are derived from
         the base resource plus the custom suffix creator.
         NOTE(review): ref="customerOutputXmlFile" is not defined in this file -
         presumably it lives in the imported launch-context.xml; confirm, or
         point it at outputFileForMultiResource. -->
    <bean id="multiResourceItemWriter" class="org.springframework.batch.item.file.MultiResourceItemWriter">
        <property name="resource" ref="customerOutputXmlFile"/>
        <property name="delegate" ref="xmlOutputWriter"/>
        <property name="itemCountLimitPerResource" value="3"/>
        <property name="resourceSuffixCreator" ref="suffix"/>
    </bean>

    <!-- Generates the per-file suffix appended by MultiResourceItemWriter. -->
    <bean id="suffix" class="net.gsd.group.spring.batch.tutorial.util.CustomerOutputFileSuffixCreator"></bean>

    <!-- Chunk-oriented step: reads items 3 at a time and hands each chunk to
         the multi-resource writer. commit-interval matches
         itemCountLimitPerResource, so each chunk maps to one output file. -->
    <batch:step id="multiResourceWriterParentStep">
        <batch:tasklet>
            <batch:chunk 
                reader="readerForMultiResource"
                writer="multiResourceItemWriter"
                commit-interval="3">
            </batch:chunk>
        </batch:tasklet>
    </batch:step>

    <!-- Single-step job; the step inherits its configuration from the parent
         step declared above. -->
    <batch:job id="multiResourceWriterJob">
        <batch:step id="multiResourceStep" parent="multiResourceWriterParentStep"/>
    </batch:job>
</beans>

Basically I have one input file and several output files. I read the data from the input file (which contains 10 rows) and I want to write it in chunks of 3 into more than one output file (in my case there will be 4 files: 3/3/3/1). For each chunk there will be one file. The job runs correctly, but the content of the output files is wrong. Each of the files contains only the last element that has been read in the current chunk.

Let's say the first chunk reads A, B and C. When this chunk is written to the file only the C is written and is written 3 times.

From my tests it works correctly only when the chunk commit-interval is 1.

Is this the correct behaviour? What am I doing wrong?

Thank you in advance


Solution

  • Your customer5TO bean should have scope prototype. Currently it is a singleton, so BeanWrapperFieldSetMapper reuses the same instance for every row and its properties are always overwritten with the last item's values.