Spring批处理无法在我的集成测试中加载JobBuilderFactory
我有一套配置,可以成功运行批处理作业,加载细胞系数据并将各条记录发布到细胞系的 Kafka 主题中。正常运行时一切正常,但当我在集成测试里尝试加载 JobLauncherTestUtils 和 JobRepositoryTestUtils 时,出现了找不到 JobBuilderFactory 的错误。从下面的配置可以看到,我确实通过 Lombok 生成的构造器(委托给 Spring 注入)加载了 JobBuilderFactory 和 StepBuilderFactory。如前所述,这些在正常运行时都没有问题。下面先给出测试所用的配置 yaml 文件。
应用程序-test.yml
# application-test.yml
# NOTE(review): the nesting below was reconstructed from the flattened paste;
# it matches the @ConfigurationProperties prefixes used by the configuration
# classes ("jobmeta-ds.datasource" and "cell-datasource.datasource").
spring:
  sql:
    init:
      schema-locations: classpath:db/migration
      platform: derby

# Batch metadata (JobRepository) datasource — embedded Derby.
jobmeta-ds:
  datasource:
    driver-class-name: org.apache.derby.jdbc.EmbeddedDriver
    url: jdbc:derby:support/jhhmeta;create=true
    password:
    jndi-name: false

# Business (cell-line) datasource — Oracle.
cell-datasource:
  datasource:
    driver-class-name: oracle.jdbc.driver.OracleDriver
    url: jdbc:oracle:thin:@localhost:1521:xe
    password:
    jndi-name: false
以下是数据源:
// CellDbConfig class — datasource for the Oracle cell-line schema.
@Configuration
public class CellDbConfig {

    /**
     * Connection settings for the cell-line database.
     * Fix: the yml nests the keys under "cell-datasource.datasource", so the
     * prefix must include the ".datasource" segment or url/driver stay unbound.
     */
    @Bean
    @ConfigurationProperties("cell-datasource.datasource")
    public DataSourceProperties cellLineDataSourceProperties() {
        return new DataSourceProperties();
    }

    /** Hikari-backed DataSource for the cell-line schema. */
    @Bean(name = "cellDataSource")
    public DataSource cellDataSource() {
        return cellLineDataSourceProperties()
                .initializeDataSourceBuilder()
                .type(HikariDataSource.class)
                .build();
    }

    /**
     * JdbcTemplate over the cell-line DataSource.
     * Fix: the original ignored the injected parameter ("clarityDataSource")
     * and referenced a non-existent symbol "cellDataSource"; the qualified
     * parameter is now used directly.
     */
    @Bean(name = "cellJdbcTemplate")
    public JdbcTemplate cellJdbcTemplate(@Qualifier("cellDataSource") DataSource cellDataSource) {
        return new JdbcTemplate(cellDataSource);
    }
}
以下是JobRepository数据源配置的另一个数据源
// Datasource for the Spring Batch JobRepository metadata tables (Derby).
@Configuration
public class JobRepoMetadataDbConfig {

    /**
     * Batch-metadata connection settings.
     * Fix: application-test.yml declares the keys under "jobmeta-ds.datasource",
     * but the original prefix was "jobmeta.datasource", so nothing was bound.
     */
    @Primary
    @Bean
    @ConfigurationProperties("jobmeta-ds.datasource")
    public DataSourceProperties jobMetadataProperties() {
        return new DataSourceProperties();
    }

    /** Primary DataSource used by the JobRepository (DBCP BasicDataSource). */
    @Primary
    @Bean(name = "jobMetaDataSource")
    public DataSource dataSourceJobMeta() {
        return jobMetadataProperties()
                .initializeDataSourceBuilder()
                .type(BasicDataSource.class)
                .build();
    }

    /** JdbcTemplate bound to the batch-metadata DataSource. */
    @Bean(name = "jobMetaJdbcTemplate")
    public JdbcTemplate jobMetaJdbcTemplate(@Qualifier("jobMetaDataSource") DataSource jobMetaDataSource) {
        return new JdbcTemplate(jobMetaDataSource);
    }
}
以下是Spring批处理的特定配置,即JobRepository等。
// Spring Batch infrastructure: JobRepository, transaction manager, launcher.
@Configuration
@EnableBatchProcessing
public class JobRepoConfig {

    private final DataSource jobMetaDataSource;

    /**
     * Fix: Lombok's @RequiredArgsConstructor does NOT copy a field-level
     * @Qualifier onto the generated constructor parameter (unless lombok.config
     * lists it in lombok.copyableAnnotations), so Spring injected by type and
     * the qualifier was silently ignored. An explicit constructor keeps the
     * qualifier where Spring can see it.
     */
    public JobRepoConfig(@Qualifier("jobMetaDataSource") DataSource jobMetaDataSource) {
        this.jobMetaDataSource = jobMetaDataSource;
    }

    /** Transaction manager for batch metadata updates. */
    @Bean
    AbstractPlatformTransactionManager jobTransactionManager() {
        return new ResourcelessTransactionManager();
    }

    /**
     * Factory for the JobRepository.
     * Fix: the database type is no longer forced to H2 (the metadata datasource
     * is Derby per application-test.yml); JobRepositoryFactoryBean detects the
     * type from the DataSource metadata.
     */
    @Bean
    public JobRepositoryFactoryBean jobRepositoryFactory() throws Exception {
        JobRepositoryFactoryBean factory = new JobRepositoryFactoryBean();
        factory.setDataSource(jobMetaDataSource);
        factory.setTransactionManager(jobTransactionManager());
        factory.afterPropertiesSet();
        return factory;
    }

    /**
     * Fix: the original built a SECOND, differently-configured
     * JobRepositoryFactoryBean here (H2 type, afterPropertiesSet never called);
     * reuse the single factory bean instead.
     */
    @Bean
    public JobRepository jobRepository() throws Exception {
        return jobRepositoryFactory().getObject();
    }

    /** Launcher used by the application and the scheduled trigger. */
    @Bean
    public SimpleJobLauncher launchAppJobLauncher() throws Exception {
        SimpleJobLauncher simpleJobLauncher = new SimpleJobLauncher();
        simpleJobLauncher.setJobRepository(jobRepository());
        simpleJobLauncher.afterPropertiesSet();
        return simpleJobLauncher;
    }
}
以下是发布细胞系数据的KafkaProducer配置:
// Kafka producer wiring for publishing cell-line records.
@Configuration
@Slf4j
public class ProducerConfig {

    /** Default topic the template publishes to. */
    @Value("${spring.kafka.template.default-topic}")
    private String cellsTopic;

    /**
     * Producer factory built from the Boot Kafka properties; any SSL settings
     * are merged on top of the base producer configuration.
     */
    @Bean
    public ProducerFactory<Long, CellVO> kafkaProducerFactory(KafkaProperties kafkaProperties) {
        Map<String, Object> config = new HashMap<>(kafkaProperties.buildProducerProperties());
        var ssl = kafkaProperties.getSsl().buildProperties();
        if (!CollectionUtils.isEmpty(ssl)) {
            config.putAll(ssl);
        }
        return new DefaultKafkaProducerFactory<>(config);
    }

    /** KafkaTemplate pre-configured with the default cells topic. */
    @Bean
    public KafkaTemplate<Long, CellVO> kafkaTemplate(ProducerFactory<Long, CellVO> kafkaProducerFactory) {
        KafkaTemplate<Long, CellVO> template = new KafkaTemplate<>(kafkaProducerFactory);
        template.setDefaultTopic(cellsTopic);
        return template;
    }
}
以下是Spring批量测试类:
@SpringBatchTest
@SpringBootTest
@ActiveProfiles("test")
@Tag("integration")
@EnableAutoConfiguration
public class CellCongTest {
@Autowired
private JobLauncherTestUtils jobLauncherTestUtils;
@Autowired
private JobRepositoryTestUtils jobRepositoryTestUtils;
@Test
public void testSuccessfulLoad() throws Exception {
}
}
最后是批处理作业本身:
// Batch job: page cell lines out of Oracle and publish them to Kafka.
@Configuration
@EnableScheduling
@Slf4j
public class CellBatchJobConfig {

    final JobBuilderFactory jobBuilderFactory;
    final JobLauncher jobAppJobLauncher;
    final StepBuilderFactory stepBuilderFactory;
    final KafkaTemplate<Long, CellVO> kafkaTemplate;
    final KafkaItemWriteListener kafkaItemWriteListener;
    final DataSource cellDataSource;

    final static String CELL_LINE_JOB = "CELL_LINE_JOB";

    @Value("${chunk-size}")
    private int chunkSize;

    /**
     * Fix: @RequiredArgsConstructor does not copy the field-level @Qualifier
     * onto the generated constructor parameter, so "cellDataSource" could
     * silently resolve to the @Primary jobMetaDataSource. The explicit
     * constructor keeps the qualifier visible to Spring. Note that
     * JobBuilderFactory/StepBuilderFactory only exist when an
     * @EnableBatchProcessing configuration is part of the (test) context.
     */
    public CellBatchJobConfig(JobBuilderFactory jobBuilderFactory,
                              JobLauncher jobAppJobLauncher,
                              StepBuilderFactory stepBuilderFactory,
                              KafkaTemplate<Long, CellVO> kafkaTemplate,
                              KafkaItemWriteListener kafkaItemWriteListener,
                              @Qualifier("cellDataSource") DataSource cellDataSource) {
        this.jobBuilderFactory = jobBuilderFactory;
        this.jobAppJobLauncher = jobAppJobLauncher;
        this.stepBuilderFactory = stepBuilderFactory;
        this.kafkaTemplate = kafkaTemplate;
        this.kafkaItemWriteListener = kafkaItemWriteListener;
        this.cellDataSource = cellDataSource;
    }

    /**
     * Paging reader over the cell-line table.
     * Fix: raw JdbcPagingItemReader/builder types replaced with {@code <CellVO>}
     * generics to match the CellRowMapper.
     */
    @Bean
    public JdbcPagingItemReader<CellVO> cellDataReader(PagingQueryProvider pagingQueryProvider) {
        return new JdbcPagingItemReaderBuilder<CellVO>()
                .name("cellDataReader")
                .dataSource(cellDataSource)
                .queryProvider(pagingQueryProvider)
                .pageSize(chunkSize)
                .rowMapper(new CellRowMapper())
                .build();
    }

    /** Oracle paging query, keyed (and ordered) by CELL_ID. */
    @Bean
    public PagingQueryProvider pagingQueryProvider() {
        OraclePagingQueryProvider pagingQueryProvider = new OraclePagingQueryProvider();
        final Map<String, Order> sortKeys = new HashMap<>();
        sortKeys.put("CELL_ID", Order.ASCENDING);
        pagingQueryProvider.setSortKeys(sortKeys);
        pagingQueryProvider.setSelectClause(" CELL_ID, CELL_TYPE, SITE, CELL_QUALITY_LINE ");
        pagingQueryProvider.setFromClause(" FROM DCV.CELL_LINES");
        return pagingQueryProvider;
    }

    /** Writer publishing each CellVO keyed by its location id; no tombstones. */
    @Bean
    public KafkaItemWriter<Long, CellVO> kafkaItemWriter() throws Exception {
        KafkaItemWriter<Long, CellVO> kafkaItemWriter = new KafkaItemWriter<>();
        kafkaItemWriter.setKafkaTemplate(kafkaTemplate);
        kafkaItemWriter.setItemKeyMapper(CellVO::getLocationId);
        kafkaItemWriter.setDelete(false);
        kafkaItemWriter.afterPropertiesSet();
        return kafkaItemWriter;
    }

    /** Single chunk-oriented step: read pages from Oracle, write to Kafka. */
    @Bean
    public Step loadCellLines() throws Exception {
        return stepBuilderFactory.get("step1")
                .<CellVO, CellVO>chunk(chunkSize)
                .reader(cellDataReader(pagingQueryProvider()))
                .writer(kafkaItemWriter())
                .listener(kafkaItemWriteListener)
                .build();
    }

    /** The cell-line job; RunIdIncrementer allows relaunch with fresh run ids. */
    @Bean
    public Job cellLineJob() throws Exception {
        return jobBuilderFactory.get(CELL_LINE_JOB)
                .incrementer(new RunIdIncrementer())
                .start(loadCellLines())
                .build();
    }

    /** Default parameter set; unique per context construction. */
    @Bean("jobParameters")
    JobParameters jobParameters() {
        JobParameters jobParameters = new JobParametersBuilder()
                .addString("jobId", UUID.randomUUID().toString())
                .addDate("date", new Date())
                .addLong("time", System.currentTimeMillis()).toJobParameters();
        return jobParameters;
    }

    /**
     * Nightly trigger (05:00).
     * Fix: the original built a Job instance but never launched it, so the
     * schedule was a no-op. It now runs the registered job bean. Parameters
     * are built fresh here (not via the singleton jobParameters() bean, which
     * would hand back the same UUID every night and collide with the existing
     * JobInstance).
     */
    @Scheduled(cron = "0 0 5 * * *")
    public Job runCellLineJob() throws Exception {
        kafkaItemWriteListener.setItems(new ArrayList<>());
        Job job = cellLineJob();
        JobParameters freshParameters = new JobParametersBuilder()
                .addString("jobId", UUID.randomUUID().toString())
                .addDate("date", new Date())
                .addLong("time", System.currentTimeMillis())
                .toJobParameters();
        jobAppJobLauncher.run(job, freshParameters);
        return job;
    }
}
不幸的是,测试失败,并显示无法加载应用程序上下文的消息:
错误如下:
Caused by: org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'jobLauncherTestUtils':
Unsatisfied dependency expressed through method 'setJob' parameter 0; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'cellBatchJobConfig': Unsatisfied dependency expressed through constructor parameter 0; nested exception is org.springframework.beans.factory.NoSuchBeanDefinitionException: No qualifying bean of type 'org.springframework.batch.core.configuration.annotation.JobBuilderFactory' available: expected at least 1 bean which qualifies as autowire candidate. Dependency annotations: {}
我尝试过的一件事是手动注册并注入 JobLauncherTestUtils,但没有成功。我不明白的是:既然在实际运行的配置中能找到这个作业,为什么在测试上下文里却找不到。
// Manual registration of JobLauncherTestUtils bound to the cell-line job.
@Configuration
class JobLaunchUtilsCellLine {

    /**
     * JobLauncherTestUtils wired to the cell-line job. The Job is injected as
     * a qualified method parameter rather than through an @Autowired field —
     * same wiring, no field state.
     */
    @Bean
    public JobLauncherTestUtils cellLineJobLauncherUtils(@Qualifier("cellLineJob") Job cellLineJob) {
        JobLauncherTestUtils utils = new JobLauncherTestUtils();
        utils.setJob(cellLineJob);
        return utils;
    }
}
然后我在Spring批量测试中这样注入它,但它不起作用:
@Qualifier("cellLineJobLauncherUtils")
@Autowired
JobLauncherTestUtils cellLineJobLauncherUtils;
然而,它仍然抱怨JobBuilderFactory bean不存在