Spring Batch Admin with Job Definitions in a Spring Boot Application

I followed the example to get Spring Batch Admin running as a Spring Boot application using spring-batch-admin-spring-boot.

This works as expected. I added a new configuration named BatchConfiguration, defined as:

package spring.batch.jobs;

... imports ...

@Configuration
public class BatchConfiguration {

    @Autowired
    private Environment env;

    @Autowired
    public PlatformTransactionManager transactionManager;

    @Autowired
    public JobRepository jobRepository;

    @Autowired
    public JobBuilderFactory jobBuilderFactory;

    @Autowired
    public StepBuilderFactory stepBuilderFactory;

    @Autowired
    public DataSource dataSource;

    @Bean
    public PlatformTransactionManager transactionManager(DataSource ds) {
        return new DataSourceTransactionManager(ds);
    }

    @Bean
    public StepBuilderFactory makeStepBuilderFactory() {
        return new StepBuilderFactory(jobRepository, transactionManager);
    }

    @Bean
    public JobRepository createJobRepository() throws Exception {
        JobRepositoryFactoryBean factory = new JobRepositoryFactoryBean();
        factory.setDataSource(dataSource);
        factory.setTransactionManager(transactionManager);
        String isolationLevelForCreate = env.getProperty("batch.repository.isolationlevelforcreate");
        if (isolationLevelForCreate != null) {
            factory.setIsolationLevelForCreate(isolationLevelForCreate);
        }
        String tablePrefix = env.getProperty("batch.repository.tableprefix");
        if (tablePrefix != null) {
            factory.setTablePrefix(tablePrefix);
        }
        factory.afterPropertiesSet();
        return factory.getObject();
    }

    @Bean
    public JobBuilderFactory makeJobBuilderFactory() {
        return new JobBuilderFactory(jobRepository);
    }

    @Bean
    public DataSource dataSource(Environment e) {
        org.apache.tomcat.jdbc.pool.DataSource ds = new org.apache.tomcat.jdbc.pool.DataSource();
        ds.setDriverClassName(e.getProperty("batch.jdbc.driver"));
        ds.setUsername(e.getProperty("batch.jdbc.user"));
        ds.setPassword(e.getProperty("batch.jdbc.password"));
        ds.setUrl(e.getProperty("batch.jdbc.url"));
        return ds;
    }

    // tag::readerwriterprocessor[]
    @Bean
    public FlatFileItemReader<Person> reader() {
        FlatFileItemReader<Person> reader = new FlatFileItemReader<Person>();
        reader.setResource(new ClassPathResource("sample-data.csv"));
        reader.setLineMapper(new DefaultLineMapper<Person>() {{
            setLineTokenizer(new DelimitedLineTokenizer() {{
                setNames(new String[]{"firstName", "lastName"});
            }});
            setFieldSetMapper(new BeanWrapperFieldSetMapper<Person>() {{
                setTargetType(Person.class);
            }});
        }});
        return reader;
    }

    @Bean
    public PersonItemProcessor processor() {
        return new PersonItemProcessor();
    }

    @Bean
    public JdbcBatchItemWriter<Person> writer() {
        JdbcBatchItemWriter<Person> writer = new JdbcBatchItemWriter<Person>();
        writer.setItemSqlParameterSourceProvider(new BeanPropertyItemSqlParameterSourceProvider<Person>());
        writer.setSql("INSERT INTO people (first_name, last_name) VALUES (:firstName, :lastName)");
        writer.setDataSource(dataSource);
        return writer;
    }
    // end::readerwriterprocessor[]

    // tag::jobstep[]
    @Bean
    public Job importUserJob() {
        return jobBuilderFactory.get("importUserJob")
                .incrementer(new RunIdIncrementer())
                .flow(step1())
                .end()
                .build();
    }

    @Bean
    public Step step1() {
        return stepBuilderFactory.get("step1")
                .<Person, Person>chunk(10)
                .reader(reader())
                .processor(processor())
                .writer(writer())
                .build();
    }
    // end::jobstep[]
}
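
For reference, the dataSource and createJobRepository beans above read the following keys from the Environment. A minimal sketch of the corresponding application.properties entries (the values here are only placeholders, not the actual settings from my project):

# JDBC settings consumed by dataSource(Environment)
batch.jdbc.driver=org.h2.Driver
batch.jdbc.url=jdbc:h2:mem:batchdb
batch.jdbc.user=sa
batch.jdbc.password=

# Optional overrides consumed by createJobRepository()
batch.repository.isolationlevelforcreate=ISOLATION_SERIALIZABLE
batch.repository.tableprefix=BATCH_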

      

Then I imported this configuration into MainConfiguration by adding the class to its @Import annotation.
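
That change looks roughly like this; treat it as a sketch, since the rest of MainConfiguration is whatever the spring-batch-admin-spring-boot example provides:

import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;

import spring.batch.jobs.BatchConfiguration;

@Configuration
@Import(BatchConfiguration.class) // added alongside whatever the example's MainConfiguration already imports
public class MainConfiguration {
    // existing beans from the example are unchanged
}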

After starting the application, I don't see importUserJob listed on the Jobs page. How can I properly register my job with Spring Batch Admin so that it is listed and can be launched from the UI?

1 answer


I was able to get the job defined in my project registered with Spring Batch Admin by adding the following file at main/resources/META-INF/spring/batch/jobs/myjob.xml:

<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
       xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
       xmlns:context="http://www.springframework.org/schema/context"
       xsi:schemaLocation="
        http://www.springframework.org/schema/batch http://www.springframework.org/schema/batch/spring-batch.xsd
        http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
        http://www.springframework.org/schema/context
        http://www.springframework.org/schema/context/spring-context-3.2.xsd">

    <context:annotation-config/>
    <context:component-scan base-package="spring.batch.jobs"/>
</beans>

      



It seems this tells the application context that the spring.batch.jobs package contains annotation-based configuration classes that need to be registered. Spring Batch Admin apparently loads each XML file under META-INF/spring/batch/jobs/ as a child application context, so the component scan in that file picks up BatchConfiguration and the jobs it defines appear in the admin UI.
