spring.batch.jdbc.initialize-schema=always not working anymore

1 day ago 2
ARTICLE AD BOX

Although I don't use the @EnableBatchProcessing annotation, my application does not pick up the Spring Batch configuration in application.yml to create the Spring Batch metadata schema. How can I fix it?

The Spring Boot version I use: 4.0.3

This is my batch config file & my custom GenericExcelStreamingReader:

package fa.training.topic_assignment.infrastructure.configurations;

import fa.training.topic_assignment.domain.entities.*;
import fa.training.topic_assignment.domain.repositories.*;
import fa.training.topic_assignment.infrastructure.adapters.batchFile.GenericExcelStreamingReader;
import fa.training.topic_assignment.infrastructure.adapters.batchFile.processor.AssessmentProcessor;
import fa.training.topic_assignment.infrastructure.adapters.batchFile.processor.AssessmentTypeProcessor;
import fa.training.topic_assignment.infrastructure.adapters.batchFile.processor.ProgrammingExerciseProcessor;
import fa.training.topic_assignment.infrastructure.adapters.batchFile.processor.ProgrammingLanguageProcessor;
import jakarta.persistence.EntityManagerFactory;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.infrastructure.item.ItemStreamReader;
import org.springframework.batch.infrastructure.item.database.JpaItemWriter;
import org.springframework.batch.infrastructure.item.database.builder.JpaItemWriterBuilder;
import org.springframework.batch.infrastructure.item.file.builder.FlatFileItemReaderBuilder;
import org.springframework.batch.infrastructure.item.file.mapping.DefaultLineMapper;
import org.springframework.batch.infrastructure.item.file.transform.DelimitedLineTokenizer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.FileSystemResource;
import org.springframework.core.task.TaskExecutor;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;

import java.util.HashMap;
import java.util.Map;

/**
 * Spring Batch wiring for the file-import jobs: a thread pool for
 * concurrent step execution, a step-scoped generic reader that handles
 * CSV or XLSX input, and per-entity JPA writers and processors.
 *
 * NOTE(review): declaring batch infrastructure beans here may suppress
 * Spring Boot's batch auto-configuration (and with it the
 * spring.batch.jdbc.initialize-schema handling) — verify against the
 * Spring Boot 4 batch auto-configuration conditions.
 */
@Slf4j(topic = "BATCH_CONFIG")
@Configuration
@RequiredArgsConstructor
public class BatchConfig {

    /** Thread pool used to run batch partitions/steps concurrently. */
    @Bean
    public TaskExecutor taskExecutor() {
        log.info("Initializing TaskExecutor for batch processing");
        ThreadPoolTaskExecutor taskExecutor = new ThreadPoolTaskExecutor();
        taskExecutor.setCorePoolSize(5);
        taskExecutor.setMaxPoolSize(10);
        taskExecutor.setThreadNamePrefix("Batch-file-");
        taskExecutor.initialize();
        return taskExecutor;
    }

    /**
     * Step-scoped reader for one partition of an input file.
     * CSV files use a FlatFileItemReader; XLSX files use the custom
     * streaming Excel reader. Row boundaries come from the partition's
     * step execution context.
     *
     * @param filePath absolute path to the input file
     * @param startRow first data row (1-based) this partition should read
     * @param endRow   row index at which this partition stops
     * @return a reader producing one column-index -> cell-text map per row
     * @throws IllegalArgumentException if the file extension is unsupported
     */
    @Bean
    @StepScope
    public ItemStreamReader<Map<Integer, String>> itemReader(
            @Value("#{stepExecutionContext['filePath']}") String filePath,
            @Value("#{stepExecutionContext['startRow']}") int startRow,
            @Value("#{stepExecutionContext['endRow']}") int endRow) {
        log.info("Creating ItemReader for file: {}, startRow: {}, endRow: {}", filePath, startRow, endRow);
        if (filePath.toLowerCase().endsWith(".csv")) {
            // Plain setters instead of double-brace initialization: the
            // double-brace idiom creates anonymous subclasses that retain a
            // reference to this configuration class.
            DelimitedLineTokenizer tokenizer = new DelimitedLineTokenizer();
            tokenizer.setDelimiter(",");
            DefaultLineMapper<Map<Integer, String>> lineMapper = new DefaultLineMapper<>();
            lineMapper.setLineTokenizer(tokenizer);
            lineMapper.setFieldSetMapper(fieldSet -> {
                Map<Integer, String> map = new HashMap<>();
                for (int i = 0; i < fieldSet.getFieldCount(); i++) {
                    map.put(i, fieldSet.readString(i));
                }
                return map;
            });
            return new FlatFileItemReaderBuilder<Map<Integer, String>>()
                    .name("csvGenericReader")
                    .resource(new FileSystemResource(filePath))
                    .lineMapper(lineMapper)
                    // currentItemCount positions the reader at the partition start
                    .currentItemCount(startRow > 0 ? startRow - 1 : 0)
                    .maxItemCount(endRow)
                    .saveState(true)
                    .build();
        }
        if (filePath.toLowerCase().endsWith(".xlsx")) {
            return new GenericExcelStreamingReader(filePath, startRow, endRow);
        }
        // IllegalArgumentException is a RuntimeException, so existing catch
        // blocks for RuntimeException still work.
        throw new IllegalArgumentException("Unsupported file type: " + filePath);
    }

    /** JPA writer for AssessmentType entities. */
    @Bean
    public JpaItemWriter<AssessmentType> assessmentTypeWriter(EntityManagerFactory emf) {
        return new JpaItemWriterBuilder<AssessmentType>()
                .entityManagerFactory(emf)
                .build();
    }

    /** JPA writer for Assessment entities. */
    @Bean
    public JpaItemWriter<Assessment> assessmentWriter(EntityManagerFactory emf) {
        return new JpaItemWriterBuilder<Assessment>()
                .entityManagerFactory(emf)
                .build();
    }

    /** JPA writer for ProgrammingLanguage entities. */
    @Bean
    public JpaItemWriter<ProgrammingLanguage> programmingLanguageWriter(EntityManagerFactory emf) {
        return new JpaItemWriterBuilder<ProgrammingLanguage>()
                .entityManagerFactory(emf)
                .build();
    }

    /** JPA writer for ProgrammingExercise entities. */
    @Bean
    public JpaItemWriter<ProgrammingExercise> programmingExerciseWriter(EntityManagerFactory emf) {
        return new JpaItemWriterBuilder<ProgrammingExercise>()
                .entityManagerFactory(emf)
                .build();
    }

    /** JPA writer for Question entities. */
    @Bean
    public JpaItemWriter<Question> questionJpaItemWriter(EntityManagerFactory emf) {
        return new JpaItemWriterBuilder<Question>()
                .entityManagerFactory(emf)
                .build();
    }

    /** Row -> AssessmentType processor. */
    @Bean
    public AssessmentTypeProcessor assessmentTypeProcessor() {
        return new AssessmentTypeProcessor();
    }

    /** Row -> Assessment processor; resolves linked entities via repositories. */
    @Bean
    public AssessmentProcessor assessmentProcessor(AssessmentTypeRepository typeRepository,
                                                  ProgrammingExerciseRepository exerciseRepository,
                                                  QuestionRepository questionRepository) {
        return new AssessmentProcessor(typeRepository, questionRepository, exerciseRepository);
    }

    /** Row -> ProgrammingLanguage processor. */
    @Bean
    public ProgrammingLanguageProcessor programmingLanguageProcessor() {
        return new ProgrammingLanguageProcessor();
    }

    /** Row -> ProgrammingExercise processor; resolves language and category. */
    @Bean
    public ProgrammingExerciseProcessor programmingExerciseProcessor(ProgrammingLanguageRepository languageRepository,
                                                                     CategoryRepository categoryRepository) {
        return new ProgrammingExerciseProcessor(languageRepository, categoryRepository);
    }
}

package fa.training.topic_assignment.infrastructure.adapters.batchFile;

import
com.github.pjfanning.xlsx.StreamingReader;

import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.ss.usermodel.Workbook;
import org.jspecify.annotations.NonNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.infrastructure.item.ExecutionContext;
import org.springframework.batch.infrastructure.item.ItemStreamException;
import org.springframework.batch.infrastructure.item.ItemStreamReader;

import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;

/**
 * Streaming, restartable reader for XLSX files. Each item is a map of
 * 0-based column index to the cell's text value. The reader skips the
 * header row when starting from the top of the sheet, supports restart
 * by persisting the current row index in the execution context, and
 * stops once {@code endRow} is reached.
 */
public class GenericExcelStreamingReader implements ItemStreamReader<Map<Integer, String>> {

    private static final Logger log = LoggerFactory.getLogger(GenericExcelStreamingReader.class);
    // Execution-context key under which the current row index is saved for restart.
    private static final String CURRENT_ROW_KEY = "GenericExcelStreamingReader.current.row.index";

    private final String filePath;
    private final int startRow;   // 1-based first data row for this partition
    private final int endRow;     // reading stops when currentRow reaches this

    private int currentRow = 0;   // 0-based count of rows consumed from the sheet
    private InputStream inputStream;
    private Workbook workbook;
    private Iterator<Row> rowIterator;

    public GenericExcelStreamingReader(String filePath, int startRow, int endRow) {
        this.filePath = filePath;
        this.startRow = startRow;
        this.endRow = endRow;
    }

    /**
     * Opens the workbook and positions the iterator at the later of the
     * restart position and the partition's start row.
     *
     * @throws ItemStreamException if the file cannot be opened or read
     */
    @Override
    public void open(@NonNull ExecutionContext executionContext) throws ItemStreamException {
        try {
            inputStream = new FileInputStream(filePath);
            workbook = StreamingReader.builder()
                    .rowCacheSize(100)
                    .bufferSize(4096)
                    .open(inputStream);
            rowIterator = workbook.getSheetAt(0).iterator();
            if (executionContext.containsKey(CURRENT_ROW_KEY)) {
                this.currentRow = executionContext.getInt(CURRENT_ROW_KEY);
            } else {
                this.currentRow = 0;
            }
            int targetStart = Math.max(currentRow, startRow - 1);
            if (targetStart == 0) {
                targetStart += 1; // Skip header row if starting from the beginning
            }
            int skipCount = 0;
            while (skipCount < targetStart && rowIterator.hasNext()) {
                rowIterator.next();
                skipCount++;
            }
            this.currentRow = skipCount;
            log.info("Opened Excel reader at row: {}, targeting start: {}", currentRow, startRow);
        } catch (Exception e) {
            // Fix: release the partially-opened stream/workbook before
            // rethrowing, otherwise a failed open leaks the file handle.
            releaseResources();
            throw new ItemStreamException("Error when opening file: " + filePath, e);
        }
    }

    /**
     * Reads the next row, or returns {@code null} at end of input
     * (iterator exhausted or {@code endRow} reached).
     */
    @Override
    public Map<Integer, String> read() {
        if (rowIterator == null || !rowIterator.hasNext() || currentRow >= endRow) {
            return null;
        }
        Row row = rowIterator.next();
        currentRow++;
        Map<Integer, String> data = new HashMap<>();
        for (int i = 0; i < row.getLastCellNum(); i++) {
            Cell cell = row.getCell(i);
            data.put(i, getCellValue(cell));
        }
        return data;
    }

    /** Saves the current row index so a restart can resume where it left off. */
    @Override
    public void update(@NonNull ExecutionContext executionContext) {
        executionContext.putInt(CURRENT_ROW_KEY, currentRow);
    }

    @Override
    public void close() throws ItemStreamException {
        try {
            if (workbook != null) workbook.close();
            if (inputStream != null) inputStream.close();
            log.info("Closed Excel reader at row: {}", currentRow);
        } catch (Exception e) {
            throw new ItemStreamException("Error when closing file", e);
        }
    }

    /** Best-effort cleanup used when open() fails part-way through. */
    private void releaseResources() {
        try {
            if (workbook != null) workbook.close();
        } catch (Exception ignored) {
            // best effort — the original open failure is what matters
        }
        try {
            if (inputStream != null) inputStream.close();
        } catch (Exception ignored) {
            // best effort — the original open failure is what matters
        }
    }

    /**
     * Converts a cell to text; null/unsupported cell types become "".
     * NOTE(review): NUMERIC cells are rendered via getNumericCellValue(),
     * so integers come out as "1.0" — confirm downstream processors expect
     * this. FORMULA cells fall into the default branch and yield "".
     */
    private String getCellValue(Cell cell) {
        if (cell == null) return "";
        return switch (cell.getCellType()) {
            case STRING -> cell.getStringCellValue().trim();
            case NUMERIC -> String.valueOf(cell.getNumericCellValue());
            case BOOLEAN -> String.valueOf(cell.getBooleanCellValue());
            default -> "";
        };
    }
}

This is my application config:

# Reconstructed from a whitespace-mangled paste; nesting inferred from the
# standard Spring Boot property tree — verify against the original file.
server:
  port: ${SERVER_PORT:8090}
spring:
  application:
    name: topic_assignment
  devtools:
    add-properties: true
  datasource:
    url: ${DB_URL:jdbc:postgresql://localhost:5432/topic_assignment}
    username: ${DB_USERNAME:postgres}
    password: ${DB_PASSWORD:postgres}
    driver-class-name: org.postgresql.Driver
  jpa:
    database-platform: org.hibernate.dialect.PostgreSQLDialect
    show-sql: true
    hibernate:
      ddl-auto: update
    properties:
      hibernate:
        "[format_sql]": true
        jdbc:
          "[time_zone]": UTC
          batch_size: 30
        order_inserts: true
        order_updates: true
  batch:
    jdbc:
      initialize-schema: always
  servlet:
    multipart:
      max-file-size: 50MB
      max-request-size: 500MB
Read Entire Article