Announcement Announcement Module
Collapse
No announcement yet.
Out of memory issue with scheduled spring batch jobs Page Title Module
Move Remove Collapse
X
Conversation Detail Module
Collapse
  • Filter
  • Time
  • Show
Clear All
new posts

  • Out of memory issue with scheduled spring batch jobs

    I have a serious issue with Spring batch running out of memory after executing several jobs within a web application.

    I use the following class to start up jobs within the web application:
    Code:
    public abstract class AbstractJobManagerImpl implements ApplicationContextAware, JobManager {
    
        /** Parameter for the unique identifier of an application context. */
        protected static final String APPLICATION_CONTEXT_ID = "APPLICATION_CONTEXT_ID";
    
        /** Logger for this class. */
        private static final Log LOGGER = LogFactory.getLog(AbstractJobManagerImpl.class);
    
        /**
         * List containing spring XML configuration files that are needed to start a
         * batch job.
         */
        protected String[] configLocations;
    
        /** The name of the job that will be executed by this manager. */
        protected String jobName;
    
        /**
         * The parent application context that contains the beans of the web
         * application itself. This way every job that is started can reuse the
         * shared beans in the parent application context.
         */
        protected ApplicationContext parentContext;
    
        /** The job execution contexts that are still running. */
        protected Set<JobExecution> jobs;
    
        protected Map<String, ClassPathXmlApplicationContext> applicationContexts;
    
        /**
         * Default constructor.
         */
        public AbstractJobManagerImpl() {
            jobs = new HashSet<JobExecution>();
            applicationContexts = new HashMap<String, ClassPathXmlApplicationContext>();
        }
    
        public void startJob(final JobParameters jobParameters) {
            if (LOGGER.isInfoEnabled()) {
                LOGGER.info("Start up application context..");
            }
    
            /*
             * Start up fresh application context for the job.
             */
            ClassPathXmlApplicationContext applicationContext = null;
            try {
                applicationContext = new ClassPathXmlApplicationContext(configLocations, parentContext);
    
                /*
                 * Register application context so it can be destroyed.
                 */
                final String applicationContextId = jobParameters.getString(APPLICATION_CONTEXT_ID);
    
                if (applicationContext == null || "".equals(applicationContextId)) {
                    final String msg = "No application context ID!!";
                    if (LOGGER.isErrorEnabled()) {
                        LOGGER.error(msg);
                    }
                    throw new BatchJobException(msg);
                }
    
                applicationContext.setId(applicationContextId);
                applicationContexts.put(applicationContextId, applicationContext);
            } catch (final Exception e) {
                LOGGER.error("Failed to startup application context!!");
                LOGGER.error(e.getMessage());
                e.printStackTrace();
            }
    
            if (LOGGER.isInfoEnabled()) {
                LOGGER.info("Application context started!");
            }
    
            /*
             * Retrieve the job launcher and the job that needs to be executed.
             */
            final JobLauncher jobLauncher = (JobLauncher) applicationContext.getBean("jobLauncher");
            final Job job = (Job) applicationContext.getBean(jobName);
    
            if (LOGGER.isInfoEnabled()) {
                LOGGER.info("Starting job with parameters:");
                final Map<String, JobParameter> parameters = jobParameters.getParameters();
                final Iterator<Entry<String, JobParameter>> iterator = parameters.entrySet().iterator();
    
                while (iterator.hasNext()) {
                    final Entry<String, JobParameter> entry = iterator.next();
                    LOGGER.info(entry.getKey() + " " + entry.getValue().getValue());
                }
            }
    
            /*
             * Execute the job.
             */
            try {
                jobLauncher.run(job, jobParameters);
            } catch (final JobParametersInvalidException e) {
                if (LOGGER.isErrorEnabled()) {
                    LOGGER.error("Job parameters are invalid!! -> " + e.getMessage());
                }
                throw new BatchJobException("Job parameters are invalid!! ", e);
            } catch (final JobExecutionAlreadyRunningException e) {
                if (LOGGER.isErrorEnabled()) {
                    LOGGER.error("Job is already running!! -> " + e.getMessage());
                }
                throw new BatchJobException("Job execution failed!! ", e);
            } catch (final JobRestartException e) {
                if (LOGGER.isErrorEnabled()) {
                    LOGGER.error("Couldn't restart job!! -> " + e.getMessage());
                }
                throw new BatchJobException("Job execution failed!! ", e);
            } catch (final JobInstanceAlreadyCompleteException e) {
                if (LOGGER.isErrorEnabled()) {
                    LOGGER.error("Job already finished!! -> " + e.getMessage());
                }
                throw new BatchJobException("Job execution failed!! ", e);
            }
        }
    
        public void registerJob(final JobExecution jobExecution) {
            jobs.add(jobExecution);
        }
    
        public void unregisterJob(final JobExecution jobExecution) {
            jobs.remove(jobExecution);
    
            /*
             * Get job parameter.
             */
            final JobInstance jobInstance = jobExecution.getJobInstance();
            final JobParameters jobParameters = jobInstance.getJobParameters();
            final String parameterValue = jobParameters.getString(APPLICATION_CONTEXT_ID);
    
            /*
             * Kill application context.
             */
            ClassPathXmlApplicationContext context = applicationContexts.get(parameterValue);
            if (context != null) {
                LOGGER.debug("Closing application context!");
    
                context.close();
                applicationContexts.remove(parameterValue);
    
                LOGGER.debug("Application context removed!");
    
                /*
                 * Mark for garbage collection.
                 */
                context = null;
            }
    
        }
    
        public void stopJobs() {
            for (final JobExecution jobExecution : jobs) {
                jobExecution.stop();
            }
        }
    
        public Set<JobExecution> getJobs() {
            return Collections.unmodifiableSet(jobs);
        }
    
        /**
         * @param configLocations
         *        the configLocations to set
         */
        @Required
        public void setConfigLocations(final String[] configLocations) {
            this.configLocations = configLocations;
        }
    
        /**
         * @param jobName
         *        the jobName to set
         */
        @Required
        public void setJobName(final String jobName) {
            this.jobName = jobName;
        }
    
        public void setApplicationContext(final ApplicationContext context) throws BeansException {
            parentContext = context;
        }
    
    }
    As you can see, when the job finishes there is a method that can destroy the application context. This method is called by the JobExecutionListener on job finish. This seems to work fine, but I see in my profiler that the ClassPathXmlApplicationContext bean allocations keep growing, even though I close the context and mark it for garbage collection.

    I also read somewhere on the forum that you shouldn't create application contexts over and over, but what is the alternative if you have a running application with scheduled jobs? How do you start your stateful Job beans?

    One way could be loading all beans into your application's context on startup, configuring the stateful beans with HTTP request scope, and starting your jobs through a web request. Personally I don't really like that approach, since the application would be calling itself through HTTP, which is a bit weird I would say. Does anybody know an alternative that achieves the same end result without having to call your application through HTTP?

  • #2
    I think I found a better solution for the issue. It seems it's not necessary to load the job beans with a different scope. I was under the assumption that the job beans were stateful, so I couldn't load them on startup, but that doesn't seem to be the case as far as I can see. So now I just refactored the AbstractJobManager to look like this:
    Code:
    /** Logger for this class. */
    private static final Log LOGGER = LogFactory.getLog(AbstractJobManagerImpl.class);

    /** The job executions that are still running. */
    protected Set<JobExecution> jobs;

    /** The launcher used to run the configured job; injected by Spring (see setJobLauncher). */
    private JobLauncher jobLauncher;

    /** The job that will be launched; injected by Spring (see setJob). */
    private Job job;

    /**
     * Default constructor. Starts with no tracked job executions.
     */
    public AbstractJobManagerImpl() {
        jobs = new HashSet<JobExecution>();
    }
    
        public void startJob(final JobParameters jobParameters) {
    
            if (LOGGER.isInfoEnabled()) {
                LOGGER.info("Starting job with parameters:");
                final Map<String, JobParameter> parameters = jobParameters.getParameters();
                final Iterator<Entry<String, JobParameter>> iterator = parameters.entrySet().iterator();
    
                while (iterator.hasNext()) {
                    final Entry<String, JobParameter> entry = iterator.next();
                    LOGGER.info(entry.getKey() + " " + entry.getValue().getValue());
                }
            }
    
            /*
             * Execute the job.
             */
            try {
                jobLauncher.run(job, jobParameters);
            } catch (final JobParametersInvalidException e) {
                if (LOGGER.isErrorEnabled()) {
                    LOGGER.error("Job parameters are invalid!! -> " + e.getMessage());
                }
                throw new BatchJobException("Job parameters are invalid!! ", e);
            } catch (final JobExecutionAlreadyRunningException e) {
                if (LOGGER.isErrorEnabled()) {
                    LOGGER.error("Job is already running!! -> " + e.getMessage());
                }
                throw new BatchJobException("Job execution failed!! ", e);
            } catch (final JobRestartException e) {
                if (LOGGER.isErrorEnabled()) {
                    LOGGER.error("Couldn't restart job!! -> " + e.getMessage());
                }
                throw new BatchJobException("Job execution failed!! ", e);
            } catch (final JobInstanceAlreadyCompleteException e) {
                if (LOGGER.isErrorEnabled()) {
                    LOGGER.error("Job already finished!! -> " + e.getMessage());
                }
                throw new BatchJobException("Job execution failed!! ", e);
            }
        }
    
        public void registerJob(final JobExecution jobExecution) {
            jobs.add(jobExecution);
        }
    
        public void unregisterJob(final JobExecution jobExecution) {
            jobs.remove(jobExecution);
        }
    
        public void stopJobs() {
            for (final JobExecution jobExecution : jobs) {
                jobExecution.stop();
            }
        }
    
    /**
     * Returns the job executions that are still running.
     *
     * @return an unmodifiable view of the running job executions
     */
    public Set<JobExecution> getJobs() {
        return Collections.unmodifiableSet(jobs);
    }
    
    /**
     * Injects the launcher used to run the job. Marked @Required, so Spring
     * fails fast at startup if it is not configured.
     *
     * @param jobLauncher
     *        the jobLauncher to set
     */
    @Required
    public void setJobLauncher(final JobLauncher jobLauncher) {
        this.jobLauncher = jobLauncher;
    }
    
    /**
     * Injects the job this manager launches. Marked @Required, so Spring
     * fails fast at startup if it is not configured.
     *
     * @param job
     *        the job to set
     */
    @Required
    public void setJob(final Job job) {
        this.job = job;
    }

    Comment


    • #3
      Hey,

      I didn't see your code in detail, but if I understand correctly, you're creating the ApplicationContext each time you want to start a job.
      If that's what you're doing, I suggest you use a scheduler (Quartz is a simple one that I've used). Then you need to load the application context only once, when you start your application. The scheduler thread will start your jobs according to the triggers that you give it.

      One thing about the use of Quartz with Spring: some classes have been created for simpler use of Quartz with Spring, but for now these classes support only old versions of Quartz (1.x).

      Comment

      Working...
      X