  • Database to file using JpaPagingItemReader with 100k record

    I'm facing a weird issue while trying to export 5,000 records with commit-interval=1000 and pageSize=1000.
    On execution it processes only 3,000 records, and if I run the job again it picks up the remaining 2,000. I also tried with 100,000 records: the first run exports 50,000, the next run 25,000, then 13,000, and so on.

    We set the commit-interval (chunk size) to 1000, so if 100,000 records satisfy the query condition, shouldn't the step work through them in blocks of 1000, with the paging inside each block driven by the page size?
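
    For reference, here is a minimal Java sketch of the same reader as the empReader bean configured below; it is only an illustration of which knob controls what. The pageSize drives how many rows each JPA query fetches, while the commit-interval on <batch:chunk> drives how many items go into one transaction. Employee stands for my entity from the JPQL query.

    import javax.persistence.EntityManagerFactory;
    import org.springframework.batch.item.database.JpaPagingItemReader;

    // Sketch only: same settings as the empReader bean in the XML below.
    public class ReaderSketch {
        public JpaPagingItemReader<Employee> empReader(EntityManagerFactory emf) throws Exception {
            JpaPagingItemReader<Employee> reader = new JpaPagingItemReader<Employee>();
            reader.setEntityManagerFactory(emf);
            reader.setQueryString("select e from Employee e where e.exported = false");
            reader.setPageSize(1000);      // rows fetched per JPA query
            reader.afterPropertiesSet();   // commit-interval lives on <batch:chunk>, not here
            return reader;
        }
    }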

    I can't find any documentation that gives a hint about this (except the Javadoc saying we need to use 1000).

    It would be a great help if Dave or someone on the team could shed some light on this issue: at least, where does this calculation happen, and why does it trim the count down to half each time?
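
    From what I can tell, the offset calculation lives in the reader itself; conceptually it seems to do something like the sketch below (an illustration, not the framework source).

    import java.util.List;
    import javax.persistence.EntityManager;
    import javax.persistence.Query;

    // Illustration only: offset-based paging as I understand the reader does it,
    // with the page number incremented after each fetch.
    public class PagingSketch {
        public List<?> readPage(EntityManager em, int page, int pageSize) {
            Query q = em.createQuery("select e from Employee e where e.exported = false");
            q.setFirstResult(page * pageSize);  // offset grows by pageSize each page
            q.setMaxResults(pageSize);          // fetch at most one page of rows
            return q.getResultList();
        }
    }

    One guess on my side: if the exported flag on the rows from the previous chunk is already flipped to true by the time the next page is fetched (I'm only assuming that is what the exportListener does), the remaining matches shift down by 1000 while the offset still advances by 1000, so every other block of 1000 would be skipped; that would at least line up with the halving I'm seeing.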

    Here is the configuration I'm using:

    <bean id="jobRepository" class="org.springframework.batch.core.repository.s upport.MapJobRepositoryFactoryBean">
    <property name="isolationLevelForCreate" value="ISOLATION_DEFAULT" />
    <property name="transactionManager" ref="transactionManager"/>
    </bean>

    <bean id="exportJobLauncher" class="org.springframework.batch.core.launch.suppo rt.SimpleJobLauncher">
    <property name="jobRepository" ref="jobRepository" />
    </bean>

    <batch:job id="empExporter">
    <batch:step id="exportStep">
    <batch:tasklet>
    <batch:chunk reader="empReader" writer="empWriter"
    commit-interval="1000" skip-limit="1000000" >
    <batch:skippable-exception-classes>
    <batch:include class="java.lang.Throwable"/>
    </batch:skippable-exception-classes>
    <batch:listeners>
    <batch:listener ref="exportListener" />
    </batch:listeners>
    </batch:chunk>
    </batch:tasklet>
    </batch:step>
    <batch:listeners>
    <batch:listener ref="exportListener"/>
    </batch:listeners>
    </batch:job>


    <bean id="empReader" class="org.springframework.batch.item.database.Jpa PagingItemReader" scope="step">
    <property name="entityManagerFactory" ref="entityManagerFactory"/>
    <property name="queryString" value="select e from Employee e where e.exported = false"/>
    <property name="pageSize" value="1000"/>
    </bean>

    <bean id="empWriter" class="org.springframework.batch.item.file.FlatFil eItemWriter" scope="step">
    <property name="resource" value="file:#{jobParameters['export.resource.path']}" />
    <property name="shouldDeleteIfEmpty" value="true"/>
    <property name="lineAggregator">
    <bean class="org.springframework.batch.item.file.transfo rm.DelimitedLineAggregator">
    <property name="delimiter">
    <util:constant static-field="org.springframework.batch.item.file.transfo rm.DelimitedLineTokenizer.DELIMITER_TAB"/>
    </property>
    <property name="fieldExtractor" ref="empFieldExtractor" />
    </bean>
    </property>
    <property name="headerCallback" ref="headWriter"/>
    </bean>

    <bean id="headWriter" class="com.sample.batch.exporter.ExportListener" />
    <bean id="empFieldExtractor" class="com.sample.batch.exporter.EmployeeFieldExtr actor" />
    Last edited by dev-100; Mar 31st, 2012, 04:26 PM.