Repository: spring-cloud/spring-cloud-task Branch: main Commit: 6cd60f570c49 Files: 441 Total size: 1.3 MB Directory structure: gitextract_72_za3bm/ ├── .editorconfig ├── .github/ │ ├── dco.yml │ └── workflows/ │ ├── ci-maven-main.yml │ ├── ci.yaml │ └── deploy-docs.yml ├── .gitignore ├── .mvn/ │ ├── jvm.config │ ├── maven.config │ └── wrapper/ │ ├── maven-wrapper.jar │ └── maven-wrapper.properties ├── .settings.xml ├── .springformat ├── CONTRIBUTING.adoc ├── LICENSE ├── README.adoc ├── docs/ │ ├── antora-playbook.yml │ ├── antora.yml │ ├── modules/ │ │ └── ROOT/ │ │ ├── nav.adoc │ │ ├── pages/ │ │ │ ├── _attributes.adoc │ │ │ ├── appendix-building-the-documentation.adoc │ │ │ ├── appendix-task-repository-schema.adoc │ │ │ ├── appendix.adoc │ │ │ ├── batch-starter.adoc │ │ │ ├── batch.adoc │ │ │ ├── configprops.adoc │ │ │ ├── features.adoc │ │ │ ├── getting-started.adoc │ │ │ ├── index.adoc │ │ │ ├── observability.adoc │ │ │ ├── preface.adoc │ │ │ └── stream.adoc │ │ └── partials/ │ │ ├── _configprops.adoc │ │ ├── _conventions.adoc │ │ ├── _metrics.adoc │ │ └── _spans.adoc │ ├── package.json │ ├── pom.xml │ └── src/ │ └── main/ │ ├── antora/ │ │ └── resources/ │ │ └── antora-resources/ │ │ └── antora.yml │ ├── asciidoc/ │ │ ├── .gitignore │ │ ├── Guardfile │ │ ├── README.adoc │ │ ├── index.htmladoc │ │ ├── index.htmlsingleadoc │ │ ├── index.pdfadoc │ │ ├── sagan-index.adoc │ │ ├── spring-cloud-task.epubadoc │ │ ├── spring-cloud-task.htmlsingleadoc │ │ └── spring-cloud-task.pdfadoc │ └── javadoc/ │ └── spring-javadoc.css ├── mvnw ├── mvnw.cmd ├── pom.xml ├── spring-cloud-starter-single-step-batch-job/ │ ├── pom.xml │ └── src/ │ ├── main/ │ │ ├── java/ │ │ │ └── org/ │ │ │ └── springframework/ │ │ │ └── cloud/ │ │ │ └── task/ │ │ │ └── batch/ │ │ │ └── autoconfigure/ │ │ │ ├── RangeConverter.java │ │ │ ├── SingleStepJobAutoConfiguration.java │ │ │ ├── SingleStepJobProperties.java │ │ │ ├── flatfile/ │ │ │ │ ├── FlatFileItemReaderAutoConfiguration.java │ │ │ │ ├── 
FlatFileItemReaderProperties.java │ │ │ │ ├── FlatFileItemWriterAutoConfiguration.java │ │ │ │ ├── FlatFileItemWriterProperties.java │ │ │ │ └── package-info.java │ │ │ ├── jdbc/ │ │ │ │ ├── JDBCSingleStepDataSourceAutoConfiguration.java │ │ │ │ ├── JdbcBatchItemWriterAutoConfiguration.java │ │ │ │ ├── JdbcBatchItemWriterProperties.java │ │ │ │ ├── JdbcCursorItemReaderAutoConfiguration.java │ │ │ │ ├── JdbcCursorItemReaderProperties.java │ │ │ │ └── package-info.java │ │ │ ├── kafka/ │ │ │ │ ├── KafkaItemReaderAutoConfiguration.java │ │ │ │ ├── KafkaItemReaderProperties.java │ │ │ │ ├── KafkaItemWriterAutoConfiguration.java │ │ │ │ ├── KafkaItemWriterProperties.java │ │ │ │ └── package-info.java │ │ │ ├── package-info.java │ │ │ └── rabbit/ │ │ │ ├── AmqpItemReaderAutoConfiguration.java │ │ │ ├── AmqpItemReaderProperties.java │ │ │ ├── AmqpItemWriterAutoConfiguration.java │ │ │ ├── AmqpItemWriterProperties.java │ │ │ └── package-info.java │ │ └── resources/ │ │ └── META-INF/ │ │ ├── spring/ │ │ │ └── org.springframework.boot.autoconfigure.AutoConfiguration.imports │ │ └── spring-configuration-metadata.json │ └── test/ │ ├── java/ │ │ └── org/ │ │ └── springframework/ │ │ └── cloud/ │ │ └── task/ │ │ └── batch/ │ │ └── autoconfigure/ │ │ ├── RangeConverterTests.java │ │ ├── SingleStepJobAutoConfigurationTests.java │ │ ├── flatfile/ │ │ │ ├── FlatFileItemReaderAutoConfigurationTests.java │ │ │ └── FlatFileItemWriterAutoConfigurationTests.java │ │ ├── jdbc/ │ │ │ ├── JdbcBatchItemWriterAutoConfigurationTests.java │ │ │ └── JdbcCursorItemReaderAutoConfigurationTests.java │ │ ├── kafka/ │ │ │ ├── KafkaItemReaderAutoConfigurationTests.java │ │ │ └── KafkaItemWriterTests.java │ │ └── rabbit/ │ │ ├── AmqpItemReaderAutoConfigurationTests.java │ │ └── AmqpItemWriterAutoConfigurationTests.java │ └── resources/ │ ├── logback-test.xml │ ├── schema-h2.sql │ ├── test.txt │ ├── testUTF16.csv │ ├── testUTF8.csv │ └── writerTestUTF16.txt ├── spring-cloud-starter-task/ │ └── pom.xml 
├── spring-cloud-task-batch/ │ ├── pom.xml │ └── src/ │ ├── main/ │ │ ├── java/ │ │ │ └── org/ │ │ │ └── springframework/ │ │ │ └── cloud/ │ │ │ └── task/ │ │ │ └── batch/ │ │ │ ├── configuration/ │ │ │ │ ├── JobLaunchCondition.java │ │ │ │ ├── TaskBatchAutoConfiguration.java │ │ │ │ ├── TaskBatchExecutionListenerBeanPostProcessor.java │ │ │ │ ├── TaskBatchExecutionListenerFactoryBean.java │ │ │ │ ├── TaskBatchProperties.java │ │ │ │ ├── TaskJobLauncherApplicationRunnerFactoryBean.java │ │ │ │ ├── TaskJobLauncherAutoConfiguration.java │ │ │ │ └── package-info.java │ │ │ ├── handler/ │ │ │ │ ├── TaskJobLauncherApplicationRunner.java │ │ │ │ └── package-info.java │ │ │ └── listener/ │ │ │ ├── TaskBatchDao.java │ │ │ ├── TaskBatchExecutionListener.java │ │ │ ├── package-info.java │ │ │ └── support/ │ │ │ ├── JdbcTaskBatchDao.java │ │ │ ├── MapTaskBatchDao.java │ │ │ └── package-info.java │ │ └── resources/ │ │ └── META-INF/ │ │ ├── additional-spring-configuration-metadata.json │ │ └── spring/ │ │ └── org.springframework.boot.autoconfigure.AutoConfiguration.imports │ └── test/ │ ├── java/ │ │ └── org/ │ │ └── springframework/ │ │ └── cloud/ │ │ └── task/ │ │ └── batch/ │ │ ├── configuration/ │ │ │ ├── TaskBatchTest.java │ │ │ └── TaskJobLauncherAutoConfigurationTests.java │ │ ├── handler/ │ │ │ ├── TaskJobLauncherApplicationRunnerCoreTests.java │ │ │ └── TaskJobLauncherApplicationRunnerTests.java │ │ └── listener/ │ │ ├── PrefixTests.java │ │ ├── PrimaryKeyTests.java │ │ └── TaskBatchExecutionListenerTests.java │ └── resources/ │ ├── META-INF/ │ │ └── spring/ │ │ └── org.springframework.cloud.task.batch.configuration.TaskBatchTest.imports │ ├── schema-h2.sql │ └── schema-with-primary-keys-h2.sql ├── spring-cloud-task-core/ │ ├── pom.xml │ └── src/ │ ├── main/ │ │ ├── java/ │ │ │ └── org/ │ │ │ └── springframework/ │ │ │ └── cloud/ │ │ │ └── task/ │ │ │ ├── configuration/ │ │ │ │ ├── DefaultTaskConfigurer.java │ │ │ │ ├── EnableTask.java │ │ │ │ ├── 
NoTransactionManagerProperty.java │ │ │ │ ├── SimpleTaskAutoConfiguration.java │ │ │ │ ├── SingleInstanceTaskListener.java │ │ │ │ ├── SingleTaskConfiguration.java │ │ │ │ ├── TaskConfigurer.java │ │ │ │ ├── TaskLifecycleConfiguration.java │ │ │ │ ├── TaskObservationCloudKeyValues.java │ │ │ │ ├── TaskProperties.java │ │ │ │ ├── TaskRepositoryDatabaseInitializerDetector.java │ │ │ │ ├── TaskRepositoryDependsOnDatabaseInitializationDetector.java │ │ │ │ ├── TaskRuntimeHints.java │ │ │ │ ├── observation/ │ │ │ │ │ ├── DefaultTaskObservationConvention.java │ │ │ │ │ ├── ObservationApplicationRunner.java │ │ │ │ │ ├── ObservationApplicationRunnerBeanPostProcessor.java │ │ │ │ │ ├── ObservationCommandLineRunner.java │ │ │ │ │ ├── ObservationCommandLineRunnerBeanPostProcessor.java │ │ │ │ │ ├── ObservationTaskAutoConfiguration.java │ │ │ │ │ ├── TaskDocumentedObservation.java │ │ │ │ │ ├── TaskObservationContext.java │ │ │ │ │ ├── TaskObservationConvention.java │ │ │ │ │ └── package-info.java │ │ │ │ └── package-info.java │ │ │ ├── listener/ │ │ │ │ ├── DefaultTaskExecutionObservationConvention.java │ │ │ │ ├── TaskException.java │ │ │ │ ├── TaskExecutionException.java │ │ │ │ ├── TaskExecutionListener.java │ │ │ │ ├── TaskExecutionListenerSupport.java │ │ │ │ ├── TaskExecutionObservation.java │ │ │ │ ├── TaskExecutionObservationContext.java │ │ │ │ ├── TaskExecutionObservationConvention.java │ │ │ │ ├── TaskLifecycleListener.java │ │ │ │ ├── TaskListenerExecutorObjectFactory.java │ │ │ │ ├── TaskObservations.java │ │ │ │ ├── annotation/ │ │ │ │ │ ├── AfterTask.java │ │ │ │ │ ├── BeforeTask.java │ │ │ │ │ ├── FailedTask.java │ │ │ │ │ ├── TaskListenerExecutor.java │ │ │ │ │ └── package-info.java │ │ │ │ └── package-info.java │ │ │ ├── package-info.java │ │ │ └── repository/ │ │ │ ├── TaskExecution.java │ │ │ ├── TaskExplorer.java │ │ │ ├── TaskNameResolver.java │ │ │ ├── TaskRepository.java │ │ │ ├── dao/ │ │ │ │ ├── JdbcTaskExecutionDao.java │ │ │ │ ├── 
MapTaskExecutionDao.java │ │ │ │ ├── TaskExecutionDao.java │ │ │ │ └── package-info.java │ │ │ ├── database/ │ │ │ │ ├── PagingQueryProvider.java │ │ │ │ ├── package-info.java │ │ │ │ └── support/ │ │ │ │ ├── AbstractSqlPagingQueryProvider.java │ │ │ │ ├── Db2PagingQueryProvider.java │ │ │ │ ├── H2PagingQueryProvider.java │ │ │ │ ├── HsqlPagingQueryProvider.java │ │ │ │ ├── MariaDbPagingQueryProvider.java │ │ │ │ ├── MySqlPagingQueryProvider.java │ │ │ │ ├── OraclePagingQueryProvider.java │ │ │ │ ├── PostgresPagingQueryProvider.java │ │ │ │ ├── SqlPagingQueryProviderFactoryBean.java │ │ │ │ ├── SqlPagingQueryUtils.java │ │ │ │ ├── SqlServerPagingQueryProvider.java │ │ │ │ └── package-info.java │ │ │ ├── package-info.java │ │ │ └── support/ │ │ │ ├── DatabaseType.java │ │ │ ├── SimpleTaskExplorer.java │ │ │ ├── SimpleTaskNameResolver.java │ │ │ ├── SimpleTaskRepository.java │ │ │ ├── TaskExecutionDaoFactoryBean.java │ │ │ ├── TaskRepositoryInitializer.java │ │ │ └── package-info.java │ │ └── resources/ │ │ ├── META-INF/ │ │ │ ├── additional-spring-configuration-metadata.json │ │ │ ├── spring/ │ │ │ │ ├── aot.factories │ │ │ │ └── org.springframework.boot.autoconfigure.AutoConfiguration.imports │ │ │ └── spring.factories │ │ └── org/ │ │ └── springframework/ │ │ └── cloud/ │ │ └── task/ │ │ ├── migration/ │ │ │ ├── 1.1.x/ │ │ │ │ ├── migration-h2.sql │ │ │ │ ├── migration-hsqldb.sql │ │ │ │ ├── migration-mysql.sql │ │ │ │ ├── migration-oracle.sql │ │ │ │ ├── migration-postgresql.sql │ │ │ │ └── migration-sqlserver.sql │ │ │ ├── 1.2.x/ │ │ │ │ ├── migration-db2.sql │ │ │ │ ├── migration-h2.sql │ │ │ │ ├── migration-hsqldb.sql │ │ │ │ ├── migration-mysql.sql │ │ │ │ ├── migration-oracle.sql │ │ │ │ ├── migration-postgresql.sql │ │ │ │ └── migration-sqlserver.sql │ │ │ ├── 2.2.x/ │ │ │ │ └── migration-oracle.sql │ │ │ └── 3.0.x/ │ │ │ ├── migration-db2.sql │ │ │ ├── migration-h2.sql │ │ │ ├── migration-hsqldb.sql │ │ │ ├── migration-mysql.sql │ │ │ └── 
migration-oracle.sql │ │ ├── schema-db2.sql │ │ ├── schema-h2.sql │ │ ├── schema-hsqldb.sql │ │ ├── schema-mariadb.sql │ │ ├── schema-mysql.sql │ │ ├── schema-oracle.sql │ │ ├── schema-postgresql.sql │ │ └── schema-sqlserver.sql │ └── test/ │ ├── java/ │ │ └── org/ │ │ └── springframework/ │ │ └── cloud/ │ │ └── task/ │ │ ├── SimpleSingleTaskAutoConfigurationTests.java │ │ ├── SimpleSingleTaskAutoConfigurationWithDataSourceTests.java │ │ ├── SimpleTaskAutoConfigurationTests.java │ │ ├── TaskCoreTests.java │ │ ├── TaskRepositoryInitializerDefaultTaskConfigurerTests.java │ │ ├── TaskRepositoryInitializerNoDataSourceTaskConfigurerTests.java │ │ ├── configuration/ │ │ │ ├── DefaultTaskConfigurerTests.java │ │ │ ├── RepositoryTransactionManagerConfigurationTests.java │ │ │ ├── TaskPropertiesTests.java │ │ │ ├── TestConfiguration.java │ │ │ └── observation/ │ │ │ └── ObservationIntegrationTests.java │ │ ├── listener/ │ │ │ ├── TaskExceptionTests.java │ │ │ ├── TaskExecutionListenerTests.java │ │ │ ├── TaskLifecycleListenerTests.java │ │ │ └── TaskListenerExecutorObjectFactoryTests.java │ │ ├── micrometer/ │ │ │ └── TaskObservationsTests.java │ │ ├── repository/ │ │ │ ├── H2TaskRepositoryIntegrationTests.java │ │ │ ├── MariaDbTaskRepositoryIntegrationTests.java │ │ │ ├── dao/ │ │ │ │ ├── BaseTaskExecutionDaoTestCases.java │ │ │ │ ├── JdbcTaskExecutionDaoMariaDBIntegrationTests.java │ │ │ │ └── TaskExecutionDaoTests.java │ │ │ ├── database/ │ │ │ │ └── support/ │ │ │ │ ├── FindAllPagingQueryProviderTests.java │ │ │ │ ├── H2PagingQueryProviderTests.java │ │ │ │ ├── InvalidPagingQueryProviderTests.java │ │ │ │ ├── SqlPagingQueryProviderFactoryBeanTests.java │ │ │ │ └── WhereClausePagingQueryProviderTests.java │ │ │ └── support/ │ │ │ ├── DatabaseTypeTests.java │ │ │ ├── SimpleTaskExplorerTests.java │ │ │ ├── SimpleTaskNameResolverTests.java │ │ │ ├── SimpleTaskRepositoryJdbcTests.java │ │ │ ├── SimpleTaskRepositoryMapTests.java │ │ │ ├── TaskDatabaseInitializerTests.java │ │ 
│ └── TaskExecutionDaoFactoryBeanTests.java │ │ └── util/ │ │ ├── TaskExecutionCreator.java │ │ ├── TestDBUtils.java │ │ ├── TestDefaultConfiguration.java │ │ ├── TestListener.java │ │ └── TestVerifierUtils.java │ └── resources/ │ ├── application.properties │ └── micrometer/ │ └── pcf-scs-info.json ├── spring-cloud-task-dependencies/ │ └── pom.xml ├── spring-cloud-task-integration-tests/ │ ├── pom.xml │ └── src/ │ └── test/ │ ├── java/ │ │ ├── configuration/ │ │ │ ├── JobConfiguration.java │ │ │ ├── JobSkipConfiguration.java │ │ │ ├── SkipItemReader.java │ │ │ └── SkipItemWriter.java │ │ └── org/ │ │ └── springframework/ │ │ └── cloud/ │ │ └── task/ │ │ ├── executionid/ │ │ │ ├── TaskStartApplication.java │ │ │ └── TaskStartTests.java │ │ ├── initializer/ │ │ │ └── TaskInitializerTests.java │ │ └── listener/ │ │ ├── BatchExecutionEventTests.java │ │ └── TaskEventTests.java │ └── resources/ │ ├── application.properties │ └── org/ │ └── springframework/ │ └── cloud/ │ └── task/ │ └── listener/ │ ├── chunk-events-sink-channel.properties │ ├── item-process-sink-channel.properties │ ├── item-read-events-sink-channel.properties │ ├── item-write-events-sink-channel.properties │ ├── job-execution-sink-channel.properties │ ├── sink-channel.properties │ ├── skip-events-sink-channel.properties │ └── step-execution-sink-channel.properties ├── spring-cloud-task-samples/ │ ├── batch-events/ │ │ ├── .mvn/ │ │ │ └── wrapper/ │ │ │ ├── maven-wrapper.jar │ │ │ └── maven-wrapper.properties │ │ ├── README.adoc │ │ ├── mvnw │ │ ├── mvnw.cmd │ │ ├── pom.xml │ │ └── src/ │ │ ├── main/ │ │ │ ├── java/ │ │ │ │ └── io/ │ │ │ │ └── spring/ │ │ │ │ └── cloud/ │ │ │ │ ├── BatchEventsApplication.java │ │ │ │ └── package-info.java │ │ │ └── resources/ │ │ │ ├── application.properties │ │ │ └── logback-test.xml │ │ └── test/ │ │ ├── java/ │ │ │ └── io/ │ │ │ └── spring/ │ │ │ └── cloud/ │ │ │ └── BatchEventsApplicationTests.java │ │ └── resources/ │ │ └── io/ │ │ └── spring/ │ │ └── task/ │ │ └── 
listener/ │ │ └── job-listener-sink-channel.properties │ ├── batch-job/ │ │ ├── .mvn/ │ │ │ └── wrapper/ │ │ │ ├── maven-wrapper.jar │ │ │ └── maven-wrapper.properties │ │ ├── README.adoc │ │ ├── mvnw │ │ ├── mvnw.cmd │ │ ├── pom.xml │ │ └── src/ │ │ ├── main/ │ │ │ ├── java/ │ │ │ │ └── io/ │ │ │ │ └── spring/ │ │ │ │ ├── BatchJobApplication.java │ │ │ │ ├── configuration/ │ │ │ │ │ ├── JobConfiguration.java │ │ │ │ │ └── package-info.java │ │ │ │ └── package-info.java │ │ │ └── resources/ │ │ │ └── application.properties │ │ └── test/ │ │ ├── java/ │ │ │ └── io/ │ │ │ └── spring/ │ │ │ ├── BatchJobApplicationTests.java │ │ │ ├── BatchJobTestConfiguration.java │ │ │ └── TestBatchJobApp.java │ │ └── resources/ │ │ └── application.properties │ ├── jpa-sample/ │ │ ├── .mvn/ │ │ │ └── wrapper/ │ │ │ ├── maven-wrapper.jar │ │ │ └── maven-wrapper.properties │ │ ├── README.adoc │ │ ├── mvnw │ │ ├── mvnw.cmd │ │ ├── pom.xml │ │ └── src/ │ │ ├── main/ │ │ │ ├── java/ │ │ │ │ └── io/ │ │ │ │ └── spring/ │ │ │ │ ├── JpaApplication.java │ │ │ │ ├── configuration/ │ │ │ │ │ ├── TaskRunComponent.java │ │ │ │ │ ├── TaskRunOutput.java │ │ │ │ │ ├── TaskRunRepository.java │ │ │ │ │ └── package-info.java │ │ │ │ └── package-info.java │ │ │ └── resources/ │ │ │ ├── application-cloud.yml │ │ │ └── application.yml │ │ └── test/ │ │ ├── java/ │ │ │ └── io/ │ │ │ └── spring/ │ │ │ └── JpaApplicationTests.java │ │ └── resources/ │ │ └── application.properties │ ├── multiple-datasources/ │ │ ├── .mvn/ │ │ │ └── wrapper/ │ │ │ ├── maven-wrapper.jar │ │ │ └── maven-wrapper.properties │ │ ├── README.adoc │ │ ├── mvnw │ │ ├── mvnw.cmd │ │ ├── pom.xml │ │ └── src/ │ │ ├── main/ │ │ │ ├── java/ │ │ │ │ └── io/ │ │ │ │ └── spring/ │ │ │ │ ├── MultipleDataSourcesApplication.java │ │ │ │ ├── configuration/ │ │ │ │ │ ├── CustomTaskConfigurer.java │ │ │ │ │ ├── EmbeddedDataSourceConfiguration.java │ │ │ │ │ ├── ExternalDataSourceConfiguration.java │ │ │ │ │ └── package-info.java │ │ │ │ ├── 
package-info.java │ │ │ │ └── task/ │ │ │ │ ├── SampleCommandLineRunner.java │ │ │ │ └── package-info.java │ │ │ └── resources/ │ │ │ └── application.properties │ │ └── test/ │ │ ├── java/ │ │ │ └── io/ │ │ │ └── spring/ │ │ │ ├── MultiDataSourcesApplicationTests.java │ │ │ └── MultiDataSourcesExternalApplicationTests.java │ │ └── resources/ │ │ └── application.properties │ ├── pom.xml │ ├── single-step-batch-job/ │ │ ├── .mvn/ │ │ │ └── wrapper/ │ │ │ ├── maven-wrapper.jar │ │ │ └── maven-wrapper.properties │ │ ├── README.adoc │ │ ├── mvnw │ │ ├── pom.xml │ │ └── src/ │ │ ├── main/ │ │ │ ├── java/ │ │ │ │ └── io/ │ │ │ │ └── spring/ │ │ │ │ ├── SingleStepBatchJobApplication.java │ │ │ │ └── package-info.java │ │ │ └── resources/ │ │ │ ├── application-amqpreader.properties │ │ │ ├── application-amqpwriter.properties │ │ │ ├── application-ffreader.properties │ │ │ ├── application-ffwriter.properties │ │ │ ├── application-jdbcreader.properties │ │ │ ├── application-jdbcwriter.properties │ │ │ ├── application-kafkareader.properties │ │ │ ├── application-kafkawriter.properties │ │ │ ├── application.properties │ │ │ └── test.txt │ │ └── test/ │ │ ├── java/ │ │ │ └── io/ │ │ │ └── spring/ │ │ │ └── BatchJobApplicationTests.java │ │ └── resources/ │ │ ├── schema-h2.sql │ │ ├── test.txt │ │ └── testresult.txt │ ├── task-events/ │ │ ├── .mvn/ │ │ │ └── wrapper/ │ │ │ ├── maven-wrapper.jar │ │ │ └── maven-wrapper.properties │ │ ├── README.adoc │ │ ├── mvnw │ │ ├── mvnw.cmd │ │ ├── pom.xml │ │ └── src/ │ │ └── main/ │ │ ├── java/ │ │ │ └── io/ │ │ │ └── spring/ │ │ │ ├── TaskEventsApplication.java │ │ │ └── package-info.java │ │ └── resources/ │ │ └── application.properties │ ├── task-observations/ │ │ ├── .gitignore │ │ ├── .mvn/ │ │ │ └── wrapper/ │ │ │ ├── maven-wrapper.jar │ │ │ └── maven-wrapper.properties │ │ ├── README.adoc │ │ ├── mvnw │ │ ├── mvnw.cmd │ │ ├── pom.xml │ │ └── src/ │ │ ├── main/ │ │ │ ├── java/ │ │ │ │ └── io/ │ │ │ │ └── spring/ │ │ │ │ └── 
taskobservations/ │ │ │ │ ├── ObservationConfiguration.java │ │ │ │ ├── TaskObservationsApplication.java │ │ │ │ └── package-info.java │ │ │ └── resources/ │ │ │ └── application.properties │ │ └── test/ │ │ └── java/ │ │ └── io/ │ │ └── spring/ │ │ └── taskobservations/ │ │ └── TaskObservationsApplicationTests.java │ └── timestamp/ │ ├── README.adoc │ ├── pom.xml │ └── src/ │ ├── main/ │ │ ├── java/ │ │ │ └── org/ │ │ │ └── springframework/ │ │ │ └── cloud/ │ │ │ └── task/ │ │ │ └── timestamp/ │ │ │ ├── TaskApplication.java │ │ │ ├── TimestampTaskProperties.java │ │ │ └── package-info.java │ │ └── resources/ │ │ └── application.properties │ └── test/ │ ├── java/ │ │ └── org/ │ │ └── springframework/ │ │ └── cloud/ │ │ └── task/ │ │ └── timestamp/ │ │ ├── TaskApplicationTests.java │ │ └── TimestampTaskPropertiesTests.java │ └── resources/ │ └── application.properties ├── spring-cloud-task-stream/ │ ├── .mvn/ │ │ └── wrapper/ │ │ ├── maven-wrapper.jar │ │ └── maven-wrapper.properties │ ├── pom.xml │ └── src/ │ ├── main/ │ │ ├── java/ │ │ │ └── org/ │ │ │ └── springframework/ │ │ │ └── cloud/ │ │ │ └── task/ │ │ │ ├── batch/ │ │ │ │ └── listener/ │ │ │ │ ├── BatchEventAutoConfiguration.java │ │ │ │ ├── EventEmittingChunkListener.java │ │ │ │ ├── EventEmittingItemProcessListener.java │ │ │ │ ├── EventEmittingItemReadListener.java │ │ │ │ ├── EventEmittingItemWriteListener.java │ │ │ │ ├── EventEmittingJobExecutionListener.java │ │ │ │ ├── EventEmittingSkipListener.java │ │ │ │ ├── EventEmittingStepExecutionListener.java │ │ │ │ ├── package-info.java │ │ │ │ └── support/ │ │ │ │ ├── BatchJobHeaders.java │ │ │ │ ├── ExitStatus.java │ │ │ │ ├── JobExecutionEvent.java │ │ │ │ ├── JobInstanceEvent.java │ │ │ │ ├── JobParameterEvent.java │ │ │ │ ├── JobParametersEvent.java │ │ │ │ ├── MessagePublisher.java │ │ │ │ ├── StepExecutionEvent.java │ │ │ │ ├── TaskBatchEventListenerBeanPostProcessor.java │ │ │ │ ├── TaskEventProperties.java │ │ │ │ └── package-info.java │ │ │ └── 
listener/ │ │ │ ├── TaskEventAutoConfiguration.java │ │ │ └── package-info.java │ │ └── resources/ │ │ ├── META-INF/ │ │ │ ├── additional-spring-configuration-metadata.json │ │ │ └── spring/ │ │ │ └── org.springframework.boot.autoconfigure.AutoConfiguration.imports │ │ └── org/ │ │ └── springframework/ │ │ └── cloud/ │ │ └── task/ │ │ └── application.properties │ └── test/ │ └── java/ │ └── org/ │ └── springframework/ │ └── cloud/ │ └── task/ │ ├── batch/ │ │ └── listener/ │ │ ├── EventListenerTests.java │ │ ├── JobExecutionEventTests.java │ │ ├── JobInstanceEventTests.java │ │ ├── JobParameterEventTests.java │ │ ├── JobParametersEventTests.java │ │ ├── StepExecutionEventTests.java │ │ ├── TaskBatchEventListenerBeanPostProcessorTests.java │ │ └── support/ │ │ └── TaskBatchEventListenerBeanPostProcessorRuntimeHintTests.java │ └── listener/ │ └── TaskEventTests.java └── src/ └── checkstyle/ └── checkstyle-suppressions.xml ================================================ FILE CONTENTS ================================================ ================================================ FILE: .editorconfig ================================================ # EditorConfig is awesome: https://EditorConfig.org # top-most EditorConfig file root = true [*] indent_style = tab indent_size = 4 end_of_line = lf insert_final_newline = true [*.yml] indent_style = space indent_size = 2 ================================================ FILE: .github/dco.yml ================================================ require: members: false ================================================ FILE: .github/workflows/ci-maven-main.yml ================================================ name: CI PRs on: push: branches: - main pull_request: branches: - main jobs: build: runs-on: ubuntu-latest name: CI PR Build steps: - uses: actions/checkout@v2 - uses: actions/setup-java@v2 with: distribution: adopt java-version: 17 - run: mvn "-Dmaven.repo.local=.m2" -U -B package -s .settings.xml 
================================================ FILE: .github/workflows/ci.yaml ================================================ name: Spring Cloud Task CI Job on: push: branches: - main - 4.3.x # Scheduled builds run daily at midnight UTC schedule: - cron: '0 0 * * *' # Manual trigger with optional branch override workflow_dispatch: inputs: branches: description: "Which branch should be built (can be a comma-separated list of branches)" required: true default: 'main' type: string jobs: deploy: uses: spring-cloud/spring-cloud-github-actions/.github/workflows/deploy.yml@main with: branches: ${{ inputs.branches }} secrets: ARTIFACTORY_USERNAME: ${{ secrets.ARTIFACTORY_USERNAME }} ARTIFACTORY_PASSWORD: ${{ secrets.ARTIFACTORY_PASSWORD }} COMMERCIAL_ARTIFACTORY_USERNAME: ${{ secrets.COMMERCIAL_ARTIFACTORY_USERNAME }} COMMERCIAL_ARTIFACTORY_PASSWORD: ${{ secrets.COMMERCIAL_ARTIFACTORY_PASSWORD }} DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} ================================================ FILE: .github/workflows/deploy-docs.yml ================================================ name: Deploy Docs on: push: branches-ignore: [ gh-pages ] tags: '**' repository_dispatch: types: request-build-reference # legacy #schedule: #- cron: '0 10 * * *' # Once per day at 10am UTC workflow_dispatch: permissions: actions: write jobs: build: runs-on: ubuntu-latest # if: github.repository_owner == 'spring-cloud' steps: - name: Checkout uses: actions/checkout@v3 with: ref: docs-build fetch-depth: 1 - name: Dispatch (partial build) if: github.ref_type == 'branch' env: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: gh workflow run deploy-docs.yml -r $(git rev-parse --abbrev-ref HEAD) -f build-refname=${{ github.ref_name }} - name: Dispatch (full build) if: github.ref_type == 'tag' env: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: gh workflow run deploy-docs.yml -r $(git rev-parse --abbrev-ref HEAD) 
================================================ FILE: .gitignore ================================================ *~ .#* *# *.sw* _site/ .factorypath .gradletasknamecache .DS_Store /application.yml /application.properties asciidoctor.css atlassian-ide-plugin.xml bin/ build/ dump.rdb out spring-shell.log target/ test-output result.txt .flattened-pom.xml # Eclipse artifacts, including WTP generated manifests .classpath .project .settings/ .springBeans spring-*/src/main/java/META-INF/MANIFEST.MF # IDEA artifacts and output dirs *.iml *.ipr *.iws .idea/* # Github Actions .m2 node node_modules build /package.json package-lock.json ================================================ FILE: .mvn/jvm.config ================================================ -Xmx1024m -XX:CICompilerCount=1 -XX:TieredStopAtLevel=1 -Djava.security.egd=file:/dev/./urandom ================================================ FILE: .mvn/maven.config ================================================ -DaltSnapshotDeploymentRepository=repo.spring.io::default::https://repo.spring.io/libs-snapshot-local -P spring ================================================ FILE: .mvn/wrapper/maven-wrapper.properties ================================================ # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. 
See the License for the # specific language governing permissions and limitations # under the License. distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.1/apache-maven-3.9.1-bin.zip wrapperUrl=https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.2.0/maven-wrapper-3.2.0.jar ================================================ FILE: .settings.xml ================================================ repo.spring.io ${env.CI_DEPLOY_USERNAME} ${env.CI_DEPLOY_PASSWORD} spring true spring-snapshots Spring Snapshots https://repo.spring.io/snapshot true spring-milestones Spring Milestones https://repo.spring.io/milestone false spring-releases Spring Releases https://repo.spring.io/release false spring-snapshots Spring Snapshots https://repo.spring.io/snapshot true spring-milestones Spring Milestones https://repo.spring.io/milestone false io.spring.javaformat ================================================ FILE: .springformat ================================================ ================================================ FILE: CONTRIBUTING.adoc ================================================ = Contributing to Spring Cloud Task :github: https://github.com/spring-cloud/spring-cloud-task Spring Cloud Task is released under the Apache 2.0 license. If you would like to contribute something, or want to hack on the code this document should help you get started. == Using GitHub Issues We use GitHub issues to track bugs and enhancements. If you have a general usage question please ask on https://stackoverflow.com[Stack Overflow]. The Spring Cloud Task team and the broader community monitor the https://stackoverflow.com/tags/spring-cloud-Task[`spring-cloud-task`] tag. If you are reporting a bug, please help to speed up problem diagnosis by providing as much information as possible. Ideally, that would include a small sample project that reproduces the problem. 
== Reporting Security Vulnerabilities If you think you have found a security vulnerability in Spring Cloud Task please *DO NOT* disclose it publicly until we've had a chance to fix it. Please don't report security vulnerabilities using GitHub issues, instead head over to https://spring.io/security-policy and learn how to disclose them responsibly. == Developer Certificate of Origin All commits must include a **Signed-off-by** trailer at the end of each commit message to indicate that the contributor agrees to the Developer Certificate of Origin. For additional details, please refer to the blog post https://spring.io/blog/2025/01/06/hello-dco-goodbye-cla-simplifying-contributions-to-spring[Hello DCO, Goodbye CLA: Simplifying Contributions to Spring]. === Code Conventions and Housekeeping None of the following guidelines is essential for a pull request, but they all help your fellow developers understand and work with your code. They can also be added after the original pull request but before a merge. * Use the Spring Framework code format conventions. If you use Eclipse, you can import formatter settings by using the `eclipse-code-formatter.xml` file from the https://github.com/spring-cloud/spring-cloud-build/blob/master/spring-cloud-dependencies-parent/eclipse-code-formatter.xml[Spring Cloud Build] project. If you use IntelliJ, you can use the https://plugins.jetbrains.com/plugin/6546[Eclipse Code Formatter Plugin] to import the same file. * Make sure all new `.java` files have a simple Javadoc class comment with at least an `@author` tag identifying you, and preferably at least a paragraph describing the class's purpose. * Add the ASF license header comment to all new `.java` files (to do so, copy it from existing files in the project). * Add yourself as an `@author` to the .java files that you modify substantially (more than cosmetic changes). * Add some Javadocs and, if you change the namespace, some XSD doc elements. 
* A few unit tests would help a lot as well. Someone has to do it, and your fellow developers appreciate the effort. * If no one else uses your branch, rebase it against the current master (or other target branch in the main project). * When writing a commit message, follow https://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html[these conventions]. If you fix an existing issue, add `Fixes gh-XXXX` (where XXXX is the issue number) at the end of the commit message. ================================================ FILE: LICENSE ================================================ Apache License Version 2.0, January 2004 https://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. 
"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. 
Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "{}" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. 
Copyright {yyyy} {name of copyright owner} Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at https://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ================================================ FILE: README.adoc ================================================ //// DO NOT EDIT THIS FILE. IT WAS GENERATED. Manual changes to this file will be lost when it is generated again. Edit the files in the src/main/asciidoc/ directory instead. //// [[spring-cloud-task]] = Spring Cloud Task Is a project centered around the idea of processing on demand. A user is able to develop a “task” that can be deployed, executed and removed on demand, yet the result of the process persists beyond the life of the task for future reporting. [[requirements:]] == Requirements: * Java 17 or Above [[build-main-project:]] == Build Main Project: [source,shell,indent=2] ---- $ ./mvnw clean install ---- [[example:]] == Example: [source,java,indent=2] ---- @SpringBootApplication @EnableTask public class MyApp { @Bean public MyTaskApplication myTask() { return new MyTaskApplication(); } public static void main(String[] args) { SpringApplication.run(MyApp.class); } public static class MyTaskApplication implements ApplicationRunner { @Override public void run(ApplicationArguments args) throws Exception { System.out.println("Hello World"); } } } ---- [[code-of-conduct]] == Code of Conduct This project adheres to the Contributor Covenant link:CODE_OF_CONDUCT.adoc[code of conduct]. By participating, you are expected to uphold this code. 
Please report unacceptable behavior to spring-code-of-conduct@pivotal.io. [[building-the-project]] == Building the Project This project requires that you invoke the Javadoc engine from the Maven command line. You can do so by appending `javadoc:aggregate` to the rest of your Maven command. For example, to build the entire project, you could use `mvn clean install -DskipTests -P docs`. ================================================ FILE: docs/antora-playbook.yml ================================================ antora: extensions: - require: '@springio/antora-extensions' root_component_name: 'cloud-task' site: title: Spring Cloud Task url: https://docs.spring.io/spring-cloud-task/reference/ content: sources: - url: ./.. branches: HEAD start_path: docs worktrees: true asciidoc: attributes: page-stackoverflow-url: https://stackoverflow.com/tags/spring-cloud-task page-pagination: '' hide-uri-scheme: '@' tabs-sync-option: '@' chomp: 'all' extensions: - '@asciidoctor/tabs' - '@springio/asciidoctor-extensions' sourcemap: true urls: latest_version_segment: '' runtime: log: failure_level: warn format: pretty ui: bundle: url: https://github.com/spring-io/antora-ui-spring/releases/download/v0.4.15/ui-bundle.zip ================================================ FILE: docs/antora.yml ================================================ name: cloud-task version: true title: spring-cloud-task nav: - modules/ROOT/nav.adoc ext: collector: run: command: ./mvnw --no-transfer-progress -B process-resources -Pdocs -pl docs -Dantora-maven-plugin.phase=none -Dgenerate-docs.phase=none -Dgenerate-readme.phase=none -Dgenerate-cloud-resources.phase=none -Dmaven-dependency-plugin-for-docs.phase=none -Dmaven-dependency-plugin-for-docs-classes.phase=none -DskipTests -DdisableConfigurationProperties local: true scan: dir: ./target/classes/antora-resources/ ================================================ FILE: docs/modules/ROOT/nav.adoc ================================================ * 
xref:index.adoc[Introduction] * xref:getting-started.adoc[] * xref:features.adoc[] * xref:batch.adoc[] * xref:batch-starter.adoc[] * xref:stream.adoc[] * xref:appendix.adoc[] ** xref:appendix-task-repository-schema.adoc[] ** xref:appendix-building-the-documentation.adoc[] ** xref:observability.adoc[] ================================================ FILE: docs/modules/ROOT/pages/_attributes.adoc ================================================ :doctype: book :idprefix: :idseparator: - :tabsize: 4 :numbered: :sectanchors: :sectnums: :icons: font :hide-uri-scheme: :docinfo: shared,private :sc-ext: java :project-full-name: Spring Cloud Task // project-specific attributes :spring-cloud-task-repo: snapshot :github-tag: master :spring-cloud-task-docs-version: current :spring-cloud-task-docs: https://docs.spring.io/spring-cloud-task/docs/{spring-cloud-task-docs-version}/reference :spring-cloud-task-docs-current: https://docs.spring.io/spring-cloud-task/reference/ :github-repo: spring-cloud/spring-cloud-task :github-raw: https://raw.github.com/{github-repo}/{github-tag} :github-code: https://github.com/{github-repo}/tree/{github-tag} :github-wiki: https://github.com/{github-repo}/wiki :github-master-code: https://github.com/{github-repo}/tree/master :sc-ext: java :sc-spring-boot: {github-code}/spring-boot/src/main/java/org/springframework/boot :dc-ext: html :dc-root: https://docs.spring.io/spring-cloud-task/docs/{spring-cloud-dataflow-docs-version}/api :dc-spring-boot: {dc-root}/org/springframework/boot :dependency-management-plugin: https://github.com/spring-gradle-plugins/dependency-management-plugin :dependency-management-plugin-documentation: {dependency-management-plugin}/blob/master/README.md :spring-boot-maven-plugin-site: https://docs.spring.io/spring-boot/docs/{spring-boot-docs-version}/maven-plugin :spring-reference: https://docs.spring.io/spring/docs/{spring-docs-version}/spring-framework-reference/htmlsingle :spring-security-reference: 
https://docs.spring.io/spring-security/site/docs/{spring-security-docs-version}/reference/htmlsingle :spring-javadoc: https://docs.spring.io/spring/docs/{spring-docs-version}/javadoc-api/org/springframework :spring-amqp-javadoc: https://docs.spring.io/spring-amqp/docs/current/api/org/springframework/amqp :spring-data-javadoc: https://docs.spring.io/spring-data/jpa/docs/current/api/org/springframework/data/jpa :spring-data-commons-javadoc: https://docs.spring.io/spring-data/commons/docs/current/api/org/springframework/data :spring-data-mongo-javadoc: https://docs.spring.io/spring-data/mongodb/docs/current/api/org/springframework/data/mongodb :spring-data-rest-javadoc: https://docs.spring.io/spring-data/rest/docs/current/api/org/springframework/data/rest :gradle-userguide: https://www.gradle.org/docs/current/userguide :propdeps-plugin: https://github.com/spring-projects/gradle-plugins/tree/master/propdeps-plugin :ant-manual: https://ant.apache.org/manual :attributes: allow-uri-read ================================================ FILE: docs/modules/ROOT/pages/appendix-building-the-documentation.adoc ================================================ [[appendix-building-the-documentation]] = Building This Documentation :page-section-summary-toc: 1 This project uses Maven to generate this documentation. To generate it for yourself, run the following command: `$ mvn clean install -DskipTests -P docs`. ================================================ FILE: docs/modules/ROOT/pages/appendix-task-repository-schema.adoc ================================================ [[appendix-task-repository-schema]] = Task Repository Schema [[partintro]] -- This appendix provides an ERD for the database schema used in the task repository. -- image::task_schema.png[] [[table-information]] == Table Information -- .TASK_EXECUTION Stores the task execution information. 
[width="80%", cols="1,1,1,1,10", options="header"] |========================================================= |Column Name |Required |Type |Field Length |Notes |TASK_EXECUTION_ID |TRUE |BIGINT | X | Spring Cloud Task Framework at app startup establishes the next available id as obtained from the `TASK_SEQ`. Or if the record is created outside of task then the value must be populated at record creation time. |START_TIME |FALSE | DATETIME(6) | X | Spring Cloud Task Framework at app startup establishes the value. |END_TIME |FALSE | DATETIME(6) | X | Spring Cloud Task Framework at app exit establishes the value. |TASK_NAME |FALSE | VARCHAR | 100 | Spring Cloud Task Framework at app startup will set this to "Application" unless user establish the name using the `spring.application.name`. |EXIT_CODE |FALSE | INTEGER | X | Follows Spring Boot defaults unless overridden by the user as discussed https://docs.spring.io/spring-cloud-task/docs/current/reference/#features-lifecycle-exit-codes[here]. |EXIT_MESSAGE |FALSE | VARCHAR | 2500 | User Defined as discussed https://docs.spring.io/spring-cloud-task/docs/current/reference/#features-task-execution-listener-exit-messages[here]. |ERROR_MESSAGE |FALSE | VARCHAR | 2500 | Spring Cloud Task Framework at app exit establishes the value. |LAST_UPDATED |TRUE | TIMESTAMP | X | Spring Cloud Task Framework at app startup establishes the value. Or if the record is created outside of task then the value must be populated at record creation time. |EXTERNAL_EXECUTION_ID |FALSE | VARCHAR | 250 | If the `spring.cloud.task.external-execution-id` property is set then Spring Cloud Task Framework at app startup will set this to the value specified. More information can be found xref:features.adoc#features-external_task_id[here] |PARENT_TASK_EXECUTION_ID |FALSE |BIGINT | X | If the `spring.cloud.task.parent-execution-id` property is set then Spring Cloud Task Framework at app startup will set this to the value specified. 
More information can be found xref:features.adoc#features-parent_task_id[here] |========================================================= .TASK_EXECUTION_PARAMS Stores the parameters used for a task execution [width="80%", cols="1,1,1,1", options="header"] |========================================================= |Column Name |Required |Type |Field Length |TASK_EXECUTION_ID |TRUE |BIGINT | X |TASK_PARAM |FALSE | VARCHAR | 2500 |========================================================= .TASK_TASK_BATCH Used to link the task execution to the batch execution. [width="80%", cols="1,1,1,1", options="header"] |========================================================= |Column Name |Required |Type |Field Length |TASK_EXECUTION_ID |TRUE |BIGINT | X |JOB_EXECUTION_ID |TRUE | BIGINT | X |========================================================= .TASK_LOCK Used for the `single-instance-enabled` feature discussed xref:features.adoc#features-single-instance-enabled[here]. [width="80%", cols="1,1,1,1,10", options="header"] |========================================================= |Column Name |Required |Type | Field Length |Notes |LOCK_KEY |TRUE |CHAR | 36 | UUID for the this lock |REGION |TRUE | VARCHAR | 100 | User can establish a group of locks using this field. |CLIENT_ID |TRUE | CHAR | 36 | The task execution id that contains the name of the app to lock. |CREATED_DATE |TRUE | DATETIME | X | The date that the entry was created |========================================================= NOTE: The DDL for setting up tables for each database type can be found https://github.com/spring-cloud/spring-cloud-task/tree/master/spring-cloud-task-core/src/main/resources/org/springframework/cloud/task[here]. -- [[sql-server]] == SQL Server By default Spring Cloud Task uses a sequence table for determining the `TASK_EXECUTION_ID` for the `TASK_EXECUTION` table. 
However, when launching multiple tasks simultaneously while using SQL Server, this can cause a deadlock to occur on the `TASK_SEQ` table. The resolution is to drop the `TASK_EXECUTION_SEQ` table and create a sequence using the same name. For example: ``` DROP TABLE TASK_SEQ; CREATE SEQUENCE [DBO].[TASK_SEQ] AS BIGINT START WITH 1 INCREMENT BY 1; ``` NOTE: Set the `START WITH` to a higher value than your current execution id. ================================================ FILE: docs/modules/ROOT/pages/appendix.adoc ================================================ [[appendix]] = Appendices :page-section-summary-toc: 1 ifndef::train-docs[] endif::[] ================================================ FILE: docs/modules/ROOT/pages/batch-starter.adoc ================================================ [[batch-job-starter]] = Single Step Batch Job Starter [[partintro]] -- This section goes into how to develop a Spring Batch `Job` with a single `Step` by using the starter included in Spring Cloud Task. This starter lets you use configuration to define an `ItemReader`, an `ItemWriter`, or a full single-step Spring Batch `Job`. For more about Spring Batch and its capabilities, see the https://spring.io/projects/spring-batch[Spring Batch documentation]. -- To obtain the starter for Maven, add the following to your build: [source,xml] ---- org.springframework.cloud spring-cloud-starter-single-step-batch-job 2.3.0 ---- To obtain the starter for Gradle, add the following to your build: [source,groovy] ---- compile "org.springframework.cloud:spring-cloud-starter-single-step-batch-job:2.3.0" ---- [[job-definition]] == Defining a Job You can use the starter to define as little as an `ItemReader` or an `ItemWriter` or as much as a full `Job`. In this section, we define which properties are required to be defined to configure a `Job`. 
[[job-definition-properties]] === Properties To begin, the starter provides a set of properties that let you configure the basics of a Job with one Step: .Job Properties |=== | Property | Type | Default Value | Description | `spring.batch.job.jobName` | `String` | `null` | The name of the job. | `spring.batch.job.stepName` | `String` | `null` | The name of the step. | `spring.batch.job.chunkSize` | `Integer` | `null` | The number of items to be processed per transaction. |=== With the above properties configured, you have a job with a single, chunk-based step. This chunk-based step reads, processes, and writes `Map` instances as the items. However, the step does not yet do anything. You need to configure an `ItemReader`, an optional `ItemProcessor`, and an `ItemWriter` to give it something to do. To configure one of these, you can either use properties and configure one of the options that has provided autoconfiguration or you can configure your own with the standard Spring configuration mechanisms. NOTE: If you configure your own, the input and output types must match the others in the step. The `ItemReader` implementations and `ItemWriter` implementations in this starter all use a `Map` as the input and the output item. [[item-readers]] == Autoconfiguration for ItemReader Implementations This starter provides autoconfiguration for four different `ItemReader` implementations: `AmqpItemReader`, `FlatFileItemReader`, `JdbcCursorItemReader`, and `KafkaItemReader`. In this section, we outline how to configure each of these by using the provided autoconfiguration. [[amqpitemreader]] === AmqpItemReader You can read from a queue or topic with AMQP by using the `AmqpItemReader`. The autoconfiguration for this `ItemReader` implementation is dependent upon two sets of configuration. The first is the configuration of an `AmqpTemplate`. You can either configure this yourself or use the autoconfiguration provided by Spring Boot. 
See the https://docs.spring.io/spring-boot/docs/3.0.x/reference/htmlsingle/#messaging.amqp.rabbitmq[Spring Boot AMQP documentation]. Once you have configured the `AmqpTemplate`, you can enable the batch capabilities to support it by setting the following properties: .`AmqpItemReader` Properties |=== | Property | Type | Default Value | Description | `spring.batch.job.amqpitemreader.enabled` | `boolean` | `false` | If `true`, the autoconfiguration will execute. | `spring.batch.job.amqpitemreader.jsonConverterEnabled` | `boolean` | `true` | Indicates if the `Jackson2JsonMessageConverter` should be registered to parse messages. |=== For more information, see the https://docs.spring.io/spring-batch/docs/4.3.x/api/org/springframework/batch/item/amqp/AmqpItemReader.html[`AmqpItemReader` documentation]. [[flatfileitemreader]] === FlatFileItemReader `FlatFileItemReader` lets you read from flat files (such as CSVs and other file formats). To read from a file, you can provide some components yourself through normal Spring configuration (`LineTokenizer`, `RecordSeparatorPolicy`, `FieldSetMapper`, `LineMapper`, or `SkippedLinesCallback`). You can also use the following properties to configure the reader: .`FlatFileItemReader` Properties |=== | Property | Type | Default Value | Description | `spring.batch.job.flatfileitemreader.saveState` | `boolean` | `true` | Determines if the state should be saved for restarts. | `spring.batch.job.flatfileitemreader.name` | `String` | `null` | Name used to provide unique keys in the `ExecutionContext`. | `spring.batch.job.flatfileitemreader.maxItemcount` | `int` | `Integer.MAX_VALUE` | Maximum number of items to be read from the file. | `spring.batch.job.flatfileitemreader.currentItemCount` | `int` | 0 | Number of items that have already been read. Used on restarts. | `spring.batch.job.flatfileitemreader.comments` | `List` | empty List | A list of Strings that indicate commented lines (lines to be ignored) in the file. 
| `spring.batch.job.flatfileitemreader.resource` | `Resource` | `null` | The resource to be read. | `spring.batch.job.flatfileitemreader.strict` | `boolean` | `true` | If set to `true`, the reader throws an exception if the resource is not found. | `spring.batch.job.flatfileitemreader.encoding` | `String` | `FlatFileItemReader.DEFAULT_CHARSET` | Encoding to be used when reading the file. | `spring.batch.job.flatfileitemreader.linesToSkip` | `int` | 0 | Indicates the number of lines to skip at the start of a file. | `spring.batch.job.flatfileitemreader.delimited` | `boolean` | `false` | Indicates whether the file is a delimited file (CSV and other formats). Only one of this property or `spring.batch.job.flatfileitemreader.fixedLength` can be `true` at the same time. | `spring.batch.job.flatfileitemreader.delimiter` | `String` | `DelimitedLineTokenizer.DELIMITER_COMMA` | If reading a delimited file, indicates the delimiter to parse on. | `spring.batch.job.flatfileitemreader.quoteCharacter` | `char` | `DelimitedLineTokenizer.DEFAULT_QUOTE_CHARACTER` | Used to determine the character used to quote values. | `spring.batch.job.flatfileitemreader.includedFields` | `List` | empty list | A list of indices to determine which fields in a record to include in the item. | `spring.batch.job.flatfileitemreader.fixedLength` | `boolean` | `false` | Indicates if a file's records are parsed by column numbers. Only one of this property or `spring.batch.job.flatfileitemreader.delimited` can be `true` at the same time. | `spring.batch.job.flatfileitemreader.ranges` | `List` | empty list | List of column ranges by which to parse a fixed width record. See the https://docs.spring.io/spring-batch/docs/4.3.x/api/org/springframework/batch/item/file/transform/Range.html[Range documentation]. | `spring.batch.job.flatfileitemreader.names` | `String []` | `null` | List of names for each field parsed from a record. These names are the keys in the `Map` in the items returned from this `ItemReader`. 
| `spring.batch.job.flatfileitemreader.parsingStrict` | `boolean` | `true` | If set to `true`, the mapping fails if the fields cannot be mapped. |=== See the https://docs.spring.io/spring-batch/docs/4.3.x/api/org/springframework/batch/item/file/FlatFileItemReader.html[`FlatFileItemReader` documentation]. [[jdbcCursorItemReader]] === JdbcCursorItemReader The `JdbcCursorItemReader` runs a query against a relational database and iterates over the resulting cursor (`ResultSet`) to provide the resulting items. This autoconfiguration lets you provide a `PreparedStatementSetter`, a `RowMapper`, or both. You can also use the following properties to configure a `JdbcCursorItemReader`: .`JdbcCursorItemReader` Properties |=== | Property | Type | Default Value | Description | `spring.batch.job.jdbccursoritemreader.saveState` | `boolean` | `true` | Determines whether the state should be saved for restarts. | `spring.batch.job.jdbccursoritemreader.name` | `String` | `null` | Name used to provide unique keys in the `ExecutionContext`. | `spring.batch.job.jdbccursoritemreader.maxItemcount` | `int` | `Integer.MAX_VALUE` | Maximum number of items to be read from the file. | `spring.batch.job.jdbccursoritemreader.currentItemCount` | `int` | 0 | Number of items that have already been read. Used on restarts. | `spring.batch.job.jdbccursoritemreader.fetchSize` | `int` | | A hint to the driver to indicate how many records to retrieve per call to the database system. For best performance, you usually want to set it to match the chunk size. | `spring.batch.job.jdbccursoritemreader.maxRows` | `int` | | Maximum number of items to read from the database. | `spring.batch.job.jdbccursoritemreader.queryTimeout` | `int` | | Number of milliseconds for the query to timeout. | `spring.batch.job.jdbccursoritemreader.ignoreWarnings` | `boolean` | `true` | Determines whether the reader should ignore SQL warnings when processing. 
| `spring.batch.job.jdbccursoritemreader.verifyCursorPosition` | `boolean` | `true` | Indicates whether the cursor's position should be verified after each read to verify that the `RowMapper` did not advance the cursor. | `spring.batch.job.jdbccursoritemreader.driverSupportsAbsolute` | `boolean` | `false` | Indicates whether the driver supports absolute positioning of a cursor. | `spring.batch.job.jdbccursoritemreader.useSharedExtendedConnection` | `boolean` | `false` | Indicates whether the connection is shared with other processing (and is therefore part of a transaction). | `spring.batch.job.jdbccursoritemreader.sql` | `String` | `null` | SQL query from which to read. |=== You can also specify JDBC DataSource specifically for the reader by using the following properties: .`JdbcCursorItemReader` Properties |=== | Property | Type | Default Value | Description | `spring.batch.job.jdbccursoritemreader.datasource.enable` | `boolean` | `false` | Determines whether `JdbcCursorItemReader` `DataSource` should be enabled. | `jdbccursoritemreader.datasource.url` | `String` | `null` | JDBC URL of the database. | `jdbccursoritemreader.datasource.username` | `String` | `null` | Login username of the database. | `jdbccursoritemreader.datasource.password` | `String` | `null` | Login password of the database. | `jdbccursoritemreader.datasource.driver-class-name` | `String` | `null` | Fully qualified name of the JDBC driver. |=== NOTE: The default `DataSource` will be used by the `JDBCCursorItemReader` if the `jdbccursoritemreader_datasource` is not specified. See the https://docs.spring.io/spring-batch/docs/4.3.x/api/org/springframework/batch/item/database/JdbcCursorItemReader.html[`JdbcCursorItemReader` documentation]. [[kafkaItemReader]] === KafkaItemReader Ingesting a partition of data from a Kafka topic is useful and exactly what the `KafkaItemReader` can do. To configure a `KafkaItemReader`, two pieces of configuration are required. 
First, configuring Kafka with Spring Boot's Kafka autoconfiguration is required (see the https://docs.spring.io/spring-boot/docs/current/reference/htmlsingle/#messaging.kafka.additional-properties[Spring Boot Kafka documentation]). Once you have configured the Kafka properties from Spring Boot, you can configure the `KafkaItemReader` itself by setting the following properties: .`KafkaItemReader` Properties |=== | Property | Type | Default Value | Description | `spring.batch.job.kafkaitemreader.name` | `String` | `null` | Name used to provide unique keys in the `ExecutionContext`. | `spring.batch.job.kafkaitemreader.topic` | `String` | `null` | Name of the topic from which to read. | `spring.batch.job.kafkaitemreader.partitions` | `List` | empty list | List of partition indices from which to read. | `spring.batch.job.kafkaitemreader.pollTimeOutInSeconds` | `long` | 30 | Timeout for the `poll()` operations. | `spring.batch.job.kafkaitemreader.saveState` | `boolean` | `true` | Determines whether the state should be saved for restarts. |=== See the https://docs.spring.io/spring-batch/docs/4.3.x/api/org/springframework/batch/item/kafka/KafkaItemReader.html[`KafkaItemReader` documentation]. [[nativeCompilation]] === Native Compilation The advantage of Single Step Batch Processing is that it lets you dynamically select which reader and writer beans to use at runtime when you use the JVM. However, when you use native compilation, you must determine the reader and writer at build time instead of runtime. The following example does so: [source,xml] org.springframework.boot spring-boot-maven-plugin process-aot process-aot -Dspring.batch.job.flatfileitemreader.name=fooReader -Dspring.batch.job.flatfileitemwriter.name=fooWriter [[item-processors]] == ItemProcessor Configuration The single-step batch job autoconfiguration accepts an `ItemProcessor` if one is available within the `ApplicationContext`. 
If one is found of the correct type (`ItemProcessor<Map<String, Object>, Map<String, Object>>`), it is autowired into the step.
| `spring.batch.job.flatfileitemwriter.delimited` | `boolean` | `false` | Indicates whether the output file is a delimited file. If `true`, `spring.batch.job.flatfileitemwriter.formatted` must be `false`. | `spring.batch.job.flatfileitemwriter.formatted` | `boolean` | `false` | Indicates whether the output file is a formatted file. If `true`, `spring.batch.job.flatfileitemwriter.delimited` must be `false`. | `spring.batch.job.flatfileitemwriter.format` | `String` | `null` | The format used to generate the output for a formatted file. The formatting is performed by using `String.format`. | `spring.batch.job.flatfileitemwriter.locale` | `Locale` | `Locale.getDefault()` | The `Locale` to be used when generating the file. | `spring.batch.job.flatfileitemwriter.maximumLength` | `int` | 0 | Max length of the record. If 0, the size is unbounded. | `spring.batch.job.flatfileitemwriter.minimumLength` | `int` | 0 | The minimum record length. | `spring.batch.job.flatfileitemwriter.delimiter` | `String` | `,` | The `String` used to delimit fields in a delimited file. | `spring.batch.job.flatfileitemwriter.encoding` | `String` | `FlatFileItemReader.DEFAULT_CHARSET` | Encoding to use when writing the file. | `spring.batch.job.flatfileitemwriter.forceSync` | `boolean` | `false` | Indicates whether a file should be force-synced to the disk on flush. | `spring.batch.job.flatfileitemwriter.names` | `String []` | `null` | List of names for each field parsed from a record. These names are the keys in the `Map` for the items received by this `ItemWriter`. | `spring.batch.job.flatfileitemwriter.append` | `boolean` | `false` | Indicates whether a file should be appended to if the output file is found. | `spring.batch.job.flatfileitemwriter.lineSeparator` | `String` | `FlatFileItemWriter.DEFAULT_LINE_SEPARATOR` | What `String` to use to separate lines in the output file.
| `spring.batch.job.flatfileitemwriter.name` | `String` | `null` | Name used to provide unique keys in the `ExecutionContext`. | `spring.batch.job.flatfileitemwriter.saveState` | `boolean` | `true` | Determines whether the state should be saved for restarts. | `spring.batch.job.flatfileitemwriter.shouldDeleteIfEmpty` | `boolean` | `false` | If set to `true`, an empty file (there is no output) is deleted when the job completes. | `spring.batch.job.flatfileitemwriter.shouldDeleteIfExists` | `boolean` | `true` | If set to `true` and a file is found where the output file should be, it is deleted before the step begins. | `spring.batch.job.flatfileitemwriter.transactional` | `boolean` | `FlatFileItemWriter.DEFAULT_TRANSACTIONAL` | Indicates whether writing to the file should be delayed while a transaction is active (output is written only when the transaction commits). |=== See the https://docs.spring.io/spring-batch/docs/4.3.x/api/org/springframework/batch/item/file/FlatFileItemWriter.html[`FlatFileItemWriter` documentation]. [[jdbcitemwriter]] === JdbcBatchItemWriter To write the output of a step to a relational database, this starter provides the ability to autoconfigure a `JdbcBatchItemWriter`. The autoconfiguration lets you provide your own `ItemPreparedStatementSetter` or `ItemSqlParameterSourceProvider` and configuration options by setting the following properties: .`JdbcBatchItemWriter` Properties |=== | Property | Type | Default Value | Description | `spring.batch.job.jdbcbatchitemwriter.name` | `String` | `null` | Name used to provide unique keys in the `ExecutionContext`. | `spring.batch.job.jdbcbatchitemwriter.sql` | `String` | `null` | The SQL used to insert each item. | `spring.batch.job.jdbcbatchitemwriter.assertUpdates` | `boolean` | `true` | Whether to verify that every insert results in the update of at least one record.
|=== You can also specify a JDBC `DataSource` specifically for the writer by using the following properties: .`JdbcBatchItemWriter` `DataSource` Properties |=== | Property | Type | Default Value | Description | `spring.batch.job.jdbcbatchitemwriter.datasource.enable` | `boolean` | `false` | Determines whether the `JdbcBatchItemWriter` `DataSource` should be enabled. | `jdbcbatchitemwriter.datasource.url` | `String` | `null` | JDBC URL of the database. | `jdbcbatchitemwriter.datasource.username` | `String` | `null` | Login username of the database. | `jdbcbatchitemwriter.datasource.password` | `String` | `null` | Login password of the database. | `jdbcbatchitemwriter.datasource.driver-class-name` | `String` | `null` | Fully qualified name of the JDBC driver. |=== NOTE: The default `DataSource` will be used by the `JdbcBatchItemWriter` if the `jdbcbatchitemwriter.datasource` is not specified. See the https://docs.spring.io/spring-batch/docs/4.3.x/api/org/springframework/batch/item/database/JdbcBatchItemWriter.html[`JdbcBatchItemWriter` documentation]. [[kafkaitemwriter]] === KafkaItemWriter To write step output to a Kafka topic, you need `KafkaItemWriter`. This starter provides autoconfiguration for a `KafkaItemWriter` by using facilities from two places. The first is Spring Boot's Kafka autoconfiguration. (See the https://docs.spring.io/spring-boot/docs/current/reference/htmlsingle/#messaging.kafka.additional-properties[Spring Boot Kafka documentation].) Second, this starter lets you configure two properties on the writer. .`KafkaItemWriter` Properties |=== | Property | Type | Default Value | Description | `spring.batch.job.kafkaitemwriter.topic` | `String` | `null` | The Kafka topic to which to write. | `spring.batch.job.kafkaitemwriter.delete` | `boolean` | `false` | Whether the items being passed to the writer are all to be sent as delete events to the topic.
|=== For more about the configuration options for the `KafkaItemWriter`, see the https://docs.spring.io/spring-batch/docs/4.3.x/api/org/springframework/batch/item/kafka/KafkaItemWriter.html[`KafkaItemWriter` documentation]. [[spring-aot]] === Spring AOT When using Spring AOT with Single Step Batch Starter you must set the reader and writer name properties at compile time (unless you create a bean(s) for the reader and/or writer). To do this you must include the name of the reader and writer that you wish to use as an argument or environment variable in the boot maven plugin or gradle plugin. For example, if you wish to enable the `FlatFileItemReader` and `FlatFileItemWriter` in Maven it would look like: ``` org.springframework.boot spring-boot-maven-plugin process-aot process-aot --spring.batch.job.flatfileitemreader.name=foobar --spring.batch.job.flatfileitemwriter.name=fooWriter ``` ================================================ FILE: docs/modules/ROOT/pages/batch.adoc ================================================ [[batch]] = Batch [[partintro]] -- This section goes into more detail about Spring Cloud Task's integration with Spring Batch. Tracking the association between a job execution and the task in which it was executed. -- [[batch-association]] == Associating a Job Execution to the Task in which It Was Executed Spring Boot provides facilities for the execution of batch jobs within a Spring Boot Uber-jar. Spring Boot's support of this functionality lets a developer execute multiple batch jobs within that execution. Spring Cloud Task provides the ability to associate the execution of a job (a job execution) with a task's execution so that one can be traced back to the other. Spring Cloud Task achieves this functionality by using the `TaskBatchExecutionListener`.
By default, this listener is auto configured in any context that has both a Spring Batch Job configured (by having a bean of type `Job` defined in the context) and the `spring-cloud-task-batch` jar on the classpath. The listener is injected into all jobs that meet those conditions. [[batch-association-override]] === Overriding the TaskBatchExecutionListener To prevent the listener from being injected into any batch jobs within the current context, you can disable the autoconfiguration by using standard Spring Boot mechanisms. To only have the listener injected into particular jobs within the context, override the `batchTaskExecutionListenerBeanPostProcessor` and provide a list of job bean IDs, as shown in the following example: [source,java] ---- public static TaskBatchExecutionListenerBeanPostProcessor batchTaskExecutionListenerBeanPostProcessor() { TaskBatchExecutionListenerBeanPostProcessor postProcessor = new TaskBatchExecutionListenerBeanPostProcessor(); postProcessor.setJobNames(Arrays.asList(new String[] {"job1", "job2"})); return postProcessor; } ---- NOTE: You can find a sample batch application in the samples module of the Spring Cloud Task Project, https://github.com/spring-cloud/spring-cloud-task/tree/master/spring-cloud-task-samples/batch-job[here]. [[batch-informational-messages]] == Batch Informational Messages Spring Cloud Task provides the ability for batch jobs to emit informational messages. The "`xref:stream.adoc#stream-integration-batch-events[Spring Batch Events]`" section covers this feature in detail. [[batch-failures-and-tasks]] == Batch Job Exit Codes As discussed xref:features.adoc#features-lifecycle-exit-codes[earlier], Spring Cloud Task applications support the ability to record the exit code of a task execution. However, in cases where you run a Spring Batch Job within a task, regardless of how the Batch Job Execution completes, the result of the task is always zero when using the default Batch/Boot behavior. 
Keep in mind that a task is a boot application and that the exit code returned from the task is the same as a boot application. To override this behavior and allow the task to return an exit code other than zero when a batch job returns a https://docs.spring.io/spring-batch/current/reference/html/step.html#batchStatusVsExitStatus[`BatchStatus`] of `FAILED`, set `spring.cloud.task.batch.fail-on-job-failure` to `true`. Then the exit code can be 1 (the default) or be based on the https://docs.spring.io/spring-boot/docs/current/reference/html/boot-features-spring-application.html#boot-features-application-exit[specified `ExitCodeGenerator`]. This functionality uses a new `ApplicationRunner` that replaces the one provided by Spring Boot. By default, it is configured with the same order. However, if you want to customize the order in which the `ApplicationRunner` is run, you can set its order by setting the `spring.cloud.task.batch.applicationRunnerOrder` property. To have your task return the exit code based on the result of the batch job execution, you need to write your own `CommandLineRunner`. //TODO Great place for an example showing how a custom CommandLineRunner ================================================ FILE: docs/modules/ROOT/pages/configprops.adoc ================================================ [[configuration-properties]] = Configuration Properties Below you can find a list of configuration properties. include::partial$_configprops.adoc[] ================================================ FILE: docs/modules/ROOT/pages/features.adoc ================================================ [[features]] = Features [[partintro]] -- This section goes into more detail about Spring Cloud Task, including how to use it, how to configure it, and the appropriate extension points. -- [[features-lifecycle]] == The lifecycle of a Spring Cloud Task In most cases, the modern cloud environment is designed around the execution of processes that are not expected to end.
If they do end, they are typically restarted. While most platforms do have some way to run a process that is not restarted when it ends, the results of that run are typically not maintained in a consumable way. Spring Cloud Task offers the ability to execute short-lived processes in an environment and record the results. Doing so allows for a microservices architecture around short-lived processes as well as longer running services through the integration of tasks by messages. While this functionality is useful in a cloud environment, the same issues can arise in a traditional deployment model as well. When running Spring Boot applications with a scheduler such as cron, it can be useful to be able to monitor the results of the application after its completion. Spring Cloud Task takes the approach that a Spring Boot application can have a start and an end and still be successful. Batch applications are one example of how processes that are expected to end (and that are often short-lived) can be helpful. Spring Cloud Task records the lifecycle events of a given task. Most long-running processes, typified by most web applications, do not save their lifecycle events. The tasks at the heart of Spring Cloud Task do. The lifecycle consists of a single task execution. This is a physical execution of a Spring Boot application configured to be a task (that is, it has the Spring Cloud Task dependencies). At the beginning of a task, before any `CommandLineRunner` or `ApplicationRunner` implementations have been run, an entry in the `TaskRepository` that records the start event is created. This event is triggered through `SmartLifecycle#start` being triggered by the Spring Framework. This indicates to the system that all beans are ready for use and comes before running any of the `CommandLineRunner` or `ApplicationRunner` implementations provided by Spring Boot. NOTE: The recording of a task only occurs upon the successful bootstrapping of an `ApplicationContext`.
If the context fails to bootstrap at all, the task's run is not recorded. Upon completion of all of the `*Runner#run` calls from Spring Boot or the failure of an `ApplicationContext` (indicated by an `ApplicationFailedEvent`), the task execution is updated in the repository with the results. NOTE: If the application requires the `ApplicationContext` to be closed at the completion of a task (all `*Runner#run` methods have been called and the task repository has been updated), set the property `spring.cloud.task.closecontextEnabled` to true. [[features-task-execution-details]] === The TaskExecution The information stored in the `TaskRepository` is modeled in the `TaskExecution` class and consists of the following information: |=== |Field |Description |`executionid` |The unique ID for the task's run. |`exitCode` |The exit code generated from an `ExitCodeExceptionMapper` implementation. If there is no exit code generated but an `ApplicationFailedEvent` is thrown, 1 is set. Otherwise, it is assumed to be 0. |`taskName` |The name for the task, as determined by the configured `TaskNameResolver`. |`startTime` |The time the task was started, as indicated by the `SmartLifecycle#start` call. |`endTime` |The time the task was completed, as indicated by the `ApplicationReadyEvent`. |`exitMessage` |Any information available at the time of exit. This can programmatically be set by a `TaskExecutionListener`. |`errorMessage` |If an exception is the cause of the end of the task (as indicated by an `ApplicationFailedEvent`), the stack trace for that exception is stored here. |`arguments` |A `List` of the string command line arguments as they were passed into the executable boot application. |=== [[features-lifecycle-exit-codes]] === Mapping Exit Codes When a task completes, it tries to return an exit code to the OS. 
If we take a look at our xref:getting-started.adoc#getting-started-developing-first-task[original example], we can see that we are not controlling that aspect of our application. So, if an exception is thrown, the JVM returns a code that may or may not be of any use to you in debugging. Consequently, Spring Boot provides an interface, `ExitCodeExceptionMapper`, that lets you map uncaught exceptions to exit codes. Doing so lets you indicate, at the level of exit codes, what went wrong. Also, by mapping exit codes in this manner, Spring Cloud Task records the returned exit code. If the task terminates with a SIG-INT or a SIG-TERM, the exit code is zero unless otherwise specified within the code. NOTE: While the task is running, the exit code is stored as a null in the repository. Once the task completes, the appropriate exit code is stored based on the guidelines described earlier in this section. [[features-configuration]] == Configuration Spring Cloud Task provides a ready-to-use configuration, as defined in the `DefaultTaskConfigurer` and `SimpleTaskConfiguration` classes. This section walks through the defaults and how to customize Spring Cloud Task for your needs. [[features-data-source]] === DataSource Spring Cloud Task uses a datasource for storing the results of task executions. By default, we provide an in-memory instance of H2 to provide a simple method of bootstrapping development. However, in a production environment, you probably want to configure your own `DataSource`. If your application uses only a single `DataSource` and that serves as both your business schema and the task repository, all you need to do is provide any `DataSource` (the easiest way to do so is through Spring Boot's configuration conventions). This `DataSource` is automatically used by Spring Cloud Task for the repository. If your application uses more than one `DataSource`, you need to configure the task repository with the appropriate `DataSource`. 
This customization can be done through an implementation of `TaskConfigurer`. [[features-table-prefix]] === Table Prefix One modifiable property of `TaskRepository` is the table prefix for the task tables. By default, they are all prefaced with `TASK_`. `TASK_EXECUTION` and `TASK_EXECUTION_PARAMS` are two examples. However, there are potential reasons to modify this prefix. If the schema name needs to be prepended to the table names or if more than one set of task tables is needed within the same schema, you must change the table prefix. You can do so by setting the `spring.cloud.task.tablePrefix` to the prefix you need, as follows: `spring.cloud.task.tablePrefix=yourPrefix` By using the `spring.cloud.task.tablePrefix`, a user assumes the responsibility to create the task tables that meet both the criteria for the task table schema but with modifications that are required for a user's business needs. You can utilize the Spring Cloud Task Schema DDL as a guide when creating your own Task DDL as seen https://github.com/spring-cloud/spring-cloud-task/tree/master/spring-cloud-task-core/src/main/resources/org/springframework/cloud/task[here]. [[features-table-initialization]] === Enable/Disable table initialization In cases where you are creating the task tables and do not wish for Spring Cloud Task to create them at task startup, set the `spring.cloud.task.initialize-enabled` property to `false`, as follows: `spring.cloud.task.initialize-enabled=false` It defaults to `true`. NOTE: The property `spring.cloud.task.initialize.enable` has been deprecated. [[features-generated_task_id]] === Externally Generated Task ID In some cases, you may want to allow for the time difference between when a task is requested and when the infrastructure actually launches it. Spring Cloud Task lets you create a `TaskExecution` when the task is requested. 
Then pass the execution ID of the generated `TaskExecution` to the task so that it can update the `TaskExecution` through the task's lifecycle. A `TaskExecution` can be created by calling the `createTaskExecution` method on an implementation of the `TaskRepository` that references the datastore that holds the `TaskExecution` objects. In order to configure your Task to use a generated `TaskExecutionId`, add the following property: `spring.cloud.task.executionid=yourtaskId` [[features-external_task_id]] === External Task Id Spring Cloud Task lets you store an external task ID for each `TaskExecution`. In order to configure your Task to use an external task ID, add the following property: `spring.cloud.task.external-execution-id=` [[features-parent_task_id]] === Parent Task Id Spring Cloud Task lets you store a parent task ID for each `TaskExecution`. An example of this would be a task that executes another task or tasks and you want to record which task launched each of the child tasks. In order to configure your Task to set a parent `TaskExecutionId`, add the following property on the child task: `spring.cloud.task.parent-execution-id=` [[features-task-configurer]] === TaskConfigurer The `TaskConfigurer` is a strategy interface that lets you customize the way components of Spring Cloud Task are configured. By default, we provide the `DefaultTaskConfigurer` that provides logical defaults: `Map`-based in-memory components (useful for development if no `DataSource` is provided) and JDBC based components (useful if there is a `DataSource` available). The `TaskConfigurer` lets you configure three main components: |=== |Component |Description |Default (provided by `DefaultTaskConfigurer`) |`TaskRepository` |The implementation of the `TaskRepository` to be used. |`SimpleTaskRepository` |`TaskExplorer` |The implementation of the `TaskExplorer` (a component for read-only access to the task repository) to be used.
|`SimpleTaskExplorer` |`PlatformTransactionManager` |A transaction manager to be used when running updates for tasks. |`JdbcTransactionManager` if a `DataSource` is used. `ResourcelessTransactionManager` if it is not. |=== You can customize any of the components described in the preceding table by creating a custom implementation of the `TaskConfigurer` interface. Typically, extending the `DefaultTaskConfigurer` (which is provided if a `TaskConfigurer` is not found) and overriding the required getter is sufficient. However, implementing your own from scratch may be required. NOTE: Users should not use getter methods from a `TaskConfigurer` directly unless they are using it to supply implementations to be exposed as Spring Beans. [[features-task-execution-listener]] === Task Execution Listener `TaskExecutionListener` lets you register listeners for specific events that occur during the task lifecycle. To do so, create a class that implements the `TaskExecutionListener` interface. The class that implements the `TaskExecutionListener` interface is notified of the following events: * `onTaskStartup`: Prior to storing the `TaskExecution` into the `TaskRepository`. * `onTaskEnd`: Prior to updating the `TaskExecution` entry in the `TaskRepository` and marking the final state of the task. * `onTaskFailed`: Prior to the `onTaskEnd` method being invoked when an unhandled exception is thrown by the task. Spring Cloud Task also lets you add `TaskExecution` Listeners to methods within a bean by using the following method annotations: * `@BeforeTask`: Prior to storing the `TaskExecution` into the `TaskRepository` * `@AfterTask`: Prior to the updating of the `TaskExecution` entry in the `TaskRepository` marking the final state of the task. * `@FailedTask`: Prior to the `@AfterTask` method being invoked when an unhandled exception is thrown by the task.
The following example shows the three annotations in use: [source,java] ---- public class MyBean { @BeforeTask public void methodA(TaskExecution taskExecution) { } @AfterTask public void methodB(TaskExecution taskExecution) { } @FailedTask public void methodC(TaskExecution taskExecution, Throwable throwable) { } } ---- NOTE: Inserting an `ApplicationListener` earlier in the chain than `TaskLifecycleListener` exists may cause unexpected effects. [[features-task-execution-listener-Exceptions]] ==== Exceptions Thrown by Task Execution Listener If an exception is thrown by a `TaskExecutionListener` event handler, all listener processing for that event handler stops. For example, if three `onTaskStartup` listeners have started and the first `onTaskStartup` event handler throws an exception, the other two `onTaskStartup` methods are not called. However, the other event handlers (`onTaskEnd` and `onTaskFailed`) for the `TaskExecutionListeners` are called. The exit code returned when an exception is thrown by a `TaskExecutionListener` event handler is the exit code that was reported by the https://docs.spring.io/spring-boot/docs/current/api/org/springframework/boot/ExitCodeEvent.html[ExitCodeEvent]. If no `ExitCodeEvent` is emitted, the Exception thrown is evaluated to see if it is of type https://docs.spring.io/spring-boot/docs/current/reference/htmlsingle/#boot-features-application-exit[ExitCodeGenerator]. If so, it returns the exit code from the `ExitCodeGenerator`. Otherwise, `1` is returned. In the case that an exception is thrown in an `onTaskStartup` method, the exit code for the application will be `1`. If an exception is thrown in either an `onTaskEnd` or `onTaskFailed` method, the exit code for the application will be the one established using the rules enumerated above. NOTE: In the case of an exception being thrown in an `onTaskStartup`, `onTaskEnd`, or `onTaskFailed`, you cannot override the exit code for the application using `ExitCodeExceptionMapper`.
[[features-task-execution-listener-exit-messages]] ==== Exit Messages You can set the exit message for a task programmatically by using a `TaskExecutionListener`. This is done by setting the `TaskExecution's` `exitMessage`, which then gets passed into the `TaskExecutionListener`. The following example shows a method that is annotated with the `@AfterTask` `ExecutionListener`: [source,java] @AfterTask public void afterMe(TaskExecution taskExecution) { taskExecution.setExitMessage("AFTER EXIT MESSAGE"); } An `ExitMessage` can be set at any of the listener events (`onTaskStartup`, `onTaskFailed`, and `onTaskEnd`). The order of precedence for the three listeners follows: . `onTaskEnd` . `onTaskFailed` . `onTaskStartup` For example, if you set an `exitMessage` for the `onTaskStartup` and `onTaskFailed` listeners and the task ends without failing, the `exitMessage` from the `onTaskStartup` is stored in the repository. Otherwise, if a failure occurs, the `exitMessage` from the `onTaskFailed` is stored. Also, if you set the `exitMessage` with an `onTaskEnd` listener, the `exitMessage` from the `onTaskEnd` supersedes the exit messages from both the `onTaskStartup` and `onTaskFailed`. [[features-single-instance-enabled]] === Restricting Spring Cloud Task Instances Spring Cloud Task lets you establish that only one task with a given task name can be run at a time. To do so, you need to establish the task name and set `spring.cloud.task.single-instance-enabled=true` for each task execution. While the first task execution is running, any other time you try to run a task with the same task name and `spring.cloud.task.single-instance-enabled=true`, the task fails with the following error message: `Task with name "application" is already running.` The default value for `spring.cloud.task.single-instance-enabled` is `false`.
The following example shows how to set `spring.cloud.task.single-instance-enabled` to `true`: `spring.cloud.task.single-instance-enabled=true or false` To use this feature, you must add the following Spring Integration dependencies to your application: [source,xml] org.springframework.integration spring-integration-core org.springframework.integration spring-integration-jdbc NOTE: The exit code for the application will be 1 if the task fails because this feature is enabled and another task is running with the same task name. [[single-instance-usage-for-spring-aot-and-native-compilation]] ==== Single Instance Usage for Spring AOT And Native Compilation To use Spring Cloud Task's single-instance feature when creating a natively compiled app, you need to enable the feature at build time. To do so, add the process-aot execution and set `spring.cloud.task.single-instance-enabled=true` as a JVM argument, as follows: [source,xml] org.springframework.boot spring-boot-maven-plugin process-aot process-aot -Dspring.cloud.task.single-instance-enabled=true [[enabling-observations-for-applicationrunner-and-commandlinerunner]] === Enabling Observations for ApplicationRunner and CommandLineRunner To enable Task Observations for `ApplicationRunner` or `CommandLineRunner`, set `spring.cloud.task.observation.enabled` to true. An example task application with observations enabled using the `SimpleMeterRegistry` can be found https://github.com/spring-cloud/spring-cloud-task/tree/main/spring-cloud-task-samples/task-observations[here]. [[disabling-spring-cloud-task-auto-configuration]] === Disabling Spring Cloud Task Auto Configuration In cases where Spring Cloud Task should not be autoconfigured for an implementation, you can disable Task's auto configuration.
This can be done either by adding the following annotation to your Task application: ``` @EnableAutoConfiguration(exclude={SimpleTaskAutoConfiguration.class}) ``` You may also disable Task auto configuration by setting the `spring.cloud.task.autoconfiguration.enabled` property to `false`. [[closing-the-context]] === Closing the Context If the application requires the `ApplicationContext` to be closed at the completion of a task (all `*Runner#run` methods have been called and the task repository has been updated), set the property `spring.cloud.task.closecontextEnabled` to `true`. Another case to close the context is when the Task Execution completes but the application does not terminate. In these cases the context is held open because a thread has been allocated (for example: if you are using a TaskExecutor). In these cases set the `spring.cloud.task.closecontextEnabled` property to `true` when launching your task. This will close the application's context once the task is complete, thus allowing the application to terminate. [[enable-task-metrics]] === Enable Task Metrics Spring Cloud Task integrates with Micrometer and creates observations for the Tasks it executes. To enable Task Observability integration, you must add `spring-boot-starter-actuator`, your preferred registry implementation (if you want to publish metrics), and micrometer-tracing (if you want to publish tracing data) to your task application. An example maven set of dependencies to enable task observability and metrics using Influx would be: [source,xml] org.springframework.boot spring-boot-starter-actuator io.micrometer micrometer-registry-influx runtime [[spring-task-and-spring-cloud-task]] === Spring Task and Spring Cloud Task Properties The term `task` is a frequently used word in the industry. For example, Spring Boot offers the `spring.task` properties while Spring Cloud Task offers the `spring.cloud.task` properties.
This has caused some confusion in the past that these two groups of properties are directly related. However, they represent two different sets of features offered in the Spring ecosystem. * `spring.task` refers to the properties that configure the `ThreadPoolTaskScheduler`. * `spring.cloud.task` refers to the properties that configure features of Spring Cloud Task. ================================================ FILE: docs/modules/ROOT/pages/getting-started.adoc ================================================ [[getting-started]] = Getting started [[partintro]] -- If you are just getting started with Spring Cloud Task, you should read this section. Here, we answer the basic "`what?`", "`how?`", and "`why?`" questions. We start with a gentle introduction to Spring Cloud Task. We then build a Spring Cloud Task application, discussing some core principles as we go. -- [[getting-started-introducing-spring-cloud-task]] == Introducing Spring Cloud Task Spring Cloud Task makes it easy to create short-lived microservices. It provides capabilities that let short-lived JVM processes be executed on demand in a production environment. [[getting-started-system-requirements]] == System Requirements You need to have Java installed (Java 17 or better). [[database-requirements]] === Database Requirements Spring Cloud Task uses a relational database to store the results of an executed task. While you can begin developing a task without a database (the status of the task is logged as part of the task repository's updates), for production environments, you want to use a supported database. Spring Cloud Task currently supports the following databases: * DB2 * H2 * HSQLDB * MySql * Oracle * Postgres * SqlServer [[getting-started-developing-first-task]] == Developing Your First Spring Cloud Task Application A good place to start is with a simple "`Hello, World!`" application, so we create the Spring Cloud Task equivalent to highlight the features of the framework.
Most IDEs have good support for Apache Maven, so we use it as the build tool for this project. NOTE: The spring.io web site contains many https://spring.io/guides[“`Getting Started`” guides] that use Spring Boot. If you need to solve a specific problem, check there first. You can shortcut the following steps by going to the https://start.spring.io/[Spring Initializr] and creating a new project. Doing so automatically generates a new project structure so that you can start coding right away. We recommend experimenting with the Spring Initializr to become familiar with it. [[getting-started-creating-project]] === Creating the Spring Task Project using Spring Initializr Now we can create and test an application that prints `Hello, World!` to the console. To do so: . Visit the link:https://start.spring.io/[Spring Initializr] site. .. Create a new Maven project with a *Group* name of `io.spring.demo` and an *Artifact* name of `helloworld`. .. In the Dependencies text box, type `task` and then select the `Task` dependency with the `Spring Cloud` label. .. In the Dependencies text box, type `h2` and then select the `H2` dependency with the `SQL` label. .. Click the *Generate Project* button . Unzip the helloworld.zip file and import the project into your favorite IDE. [[getting-started-writing-the-code]] === Writing the Code To finish our application, we need to update the generated `HelloworldApplication` with the following contents so that it launches a Task. 
[source,java] ---- package io.spring.demo.helloworld; import org.springframework.boot.ApplicationArguments; import org.springframework.boot.ApplicationRunner; import org.springframework.boot.CommandLineRunner; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.cloud.task.configuration.EnableTask; import org.springframework.context.annotation.Bean; @SpringBootApplication @EnableTask public class HelloworldApplication { @Bean public ApplicationRunner applicationRunner() { return new HelloWorldApplicationRunner(); } public static void main(String[] args) { SpringApplication.run(HelloworldApplication.class, args); } public static class HelloWorldApplicationRunner implements ApplicationRunner { @Override public void run(ApplicationArguments args) throws Exception { System.out.println("Hello, World!"); } } } ---- While it may seem small, quite a bit is going on. For more about Spring Boot specifics, see the https://docs.spring.io/spring-boot/docs/current/reference/html/[Spring Boot reference documentation]. Now we can open the `application.properties` file in `src/main/resources`. We need to configure two properties in `application.properties`: * `application.name`: To set the application name (which is translated to the task name) * `logging.level`: To set the logging for Spring Cloud Task to `DEBUG` in order to get a view of what is going on. The following example shows how to do both: [source] ---- logging.level.org.springframework.cloud.task=DEBUG spring.application.name=helloWorld ---- [[getting-started-at-task]] ==== Task Auto Configuration When including the Spring Cloud Task Starter dependency, Task auto configures all beans to bootstrap its functionality. Part of this configuration registers the `TaskRepository` and the infrastructure for its use. In our demo, the `TaskRepository` uses an embedded H2 database to record the results of a task. 
This H2 embedded database is not a practical solution for a production environment, since the H2 DB goes away once the task ends. However, for a quick getting-started experience, we can use this in our example as well as echoing to the logs what is being updated in that repository. In the xref:features.adoc#features-configuration[Configuration] section (later in this documentation), we cover how to customize the configuration of the pieces provided by Spring Cloud Task. When our sample application runs, Spring Boot launches our `HelloWorldApplicationRunner` and outputs our "`Hello, World!`" message to standard out. The `TaskLifecycleListener` records the start of the task and the end of the task in the repository. [[getting-started-main-method]] ==== The main method The main method serves as the entry point to any java application. Our main method delegates to Spring Boot's https://docs.spring.io/spring-boot/docs/current/reference/html/boot-features-spring-application.html[SpringApplication] class. [[getting-started-clr]] ==== The ApplicationRunner Spring includes many ways to bootstrap an application's logic. Spring Boot provides a convenient method of doing so in an organized manner through its `*Runner` interfaces (`CommandLineRunner` or `ApplicationRunner`). A well behaved task can bootstrap any logic by using one of these two runners. The lifecycle of a task is considered from before the `*Runner#run` methods are executed to once they are all complete. Spring Boot lets an application use multiple `*Runner` implementations, as does Spring Cloud Task. NOTE: Any processing bootstrapped from mechanisms other than a `CommandLineRunner` or `ApplicationRunner` (by using `InitializingBean#afterPropertiesSet` for example) is not recorded by Spring Cloud Task. [[getting-started-running-the-example]] === Running the Example At this point, our application should work. 
Since this application is Spring Boot-based, we can run it from the command line by using `$ ./mvnw spring-boot:run` from the root of our application, as shown (with its output) in the following example: [source] ---- $ mvn clean spring-boot:run ....... . . . ....... . . . (Maven log output here) ....... . . . . ____ _ __ _ _ /\\ / ___'_ __ _ _(_)_ __ __ _ \ \ \ \ ( ( )\___ | '_ | '_| | '_ \/ _` | \ \ \ \ \\/ ___)| |_)| | | | | || (_| | ) ) ) ) ' |____| .__|_| |_|_| |_\__, | / / / / =========|_|==============|___/=/_/_/_/ :: Spring Boot :: (v3.3.0) 2024-01-04T10:07:01.102-06:00 INFO 18248 --- [helloWorld] [ main] i.s.d.helloworld.HelloworldApplication : Starting HelloworldApplication using Java 21.0.1 with PID 18248 (/Users/dashaun/fun/dashaun/spring-cloud-task/helloworld/target/classes started by dashaun in /Users/dashaun/fun/dashaun/spring-cloud-task/helloworld) 2024-01-04T10:07:01.103-06:00 INFO 18248 --- [helloWorld] [ main] i.s.d.helloworld.HelloworldApplication : No active profile set, falling back to 1 default profile: "default" 2024-01-04T10:07:01.526-06:00 INFO 18248 --- [helloWorld] [ main] com.zaxxer.hikari.HikariDataSource : HikariPool-1 - Starting... 2024-01-04T10:07:01.626-06:00 INFO 18248 --- [helloWorld] [ main] com.zaxxer.hikari.pool.HikariPool : HikariPool-1 - Added connection conn0: url=jdbc:h2:mem:3ad913f8-59ce-4785-bf8e-d6335dff6856 user=SA 2024-01-04T10:07:01.627-06:00 INFO 18248 --- [helloWorld] [ main] com.zaxxer.hikari.HikariDataSource : HikariPool-1 - Start completed. 
2024-01-04T10:07:01.633-06:00 DEBUG 18248 --- [helloWorld] [ main] o.s.c.t.c.SimpleTaskAutoConfiguration : Using org.springframework.cloud.task.configuration.DefaultTaskConfigurer TaskConfigurer 2024-01-04T10:07:01.633-06:00 DEBUG 18248 --- [helloWorld] [ main] o.s.c.t.c.DefaultTaskConfigurer : No EntityManager was found, using DataSourceTransactionManager 2024-01-04T10:07:01.639-06:00 DEBUG 18248 --- [helloWorld] [ main] o.s.c.t.r.s.TaskRepositoryInitializer : Initializing task schema for h2 database 2024-01-04T10:07:01.772-06:00 DEBUG 18248 --- [helloWorld] [ main] o.s.c.t.r.support.SimpleTaskRepository : Creating: TaskExecution{executionId=0, parentExecutionId=null, exitCode=null, taskName='helloWorld', startTime=2024-01-04T10:07:01.757268, endTime=null, exitMessage='null', externalExecutionId='null', errorMessage='null', arguments=[]} 2024-01-04T10:07:01.785-06:00 INFO 18248 --- [helloWorld] [ main] i.s.d.helloworld.HelloworldApplication : Started HelloworldApplication in 0.853 seconds (process running for 1.029) Hello, World! 2024-01-04T10:07:01.794-06:00 DEBUG 18248 --- [helloWorld] [ main] o.s.c.t.r.support.SimpleTaskRepository : Updating: TaskExecution with executionId=1 with the following {exitCode=0, endTime=2024-01-04T10:07:01.787112, exitMessage='null', errorMessage='null'} 2024-01-04T10:07:01.799-06:00 INFO 18248 --- [helloWorld] [ionShutdownHook] com.zaxxer.hikari.HikariDataSource : HikariPool-1 - Shutdown initiated... 2024-01-04T10:07:01.806-06:00 INFO 18248 --- [helloWorld] [ionShutdownHook] com.zaxxer.hikari.HikariDataSource : HikariPool-1 - Shutdown completed. ....... . . . ....... . . . (Maven log output here) ....... . . . ---- The preceding output has three lines that are of interest to us here: * `SimpleTaskRepository` logged the creation of the entry in the `TaskRepository`. * The execution of our `ApplicationRunner`, demonstrated by the "`Hello, World!`" output. 
* `SimpleTaskRepository` logs the completion of the task in the `TaskRepository`. NOTE: A simple task application can be found in the samples module of the Spring Cloud Task Project https://github.com/spring-cloud/spring-cloud-task/tree/master/spring-cloud-task-samples/timestamp[here]. ================================================ FILE: docs/modules/ROOT/pages/index.adoc ================================================ [[spring-cloud-task-reference-guide]] = Spring Cloud Task Reference Guide Michael Minella, Glenn Renfro, Jay Bryant :page-section-summary-toc: 1 include::preface.adoc[leveloffset=1] // ====================================================================================== Version {project-version} (C) 2009-2022 VMware, Inc. All rights reserved. Copies of this document may be made for your own use and for distribution to others, provided that you do not charge any fee for such copies and further provided that each copy contains this Copyright Notice, whether distributed in print or electronically. // ====================================================================================== ================================================ FILE: docs/modules/ROOT/pages/observability.adoc ================================================ [[observability]] = Observability == Observability metadata include::partial$_metrics.adoc[] include::partial$_spans.adoc[] ================================================ FILE: docs/modules/ROOT/pages/preface.adoc ================================================ [[preface]] = Preface [[task-documentation-about]] This section provides a brief overview of the Spring Cloud Task reference documentation. Think of it as a map for the rest of the document. You can read this reference guide in a linear fashion or you can skip sections if something does not interest you. 
[[about-the-documentation]] == About the documentation The Spring Cloud Task reference guide is available in https://docs.spring.io/spring-cloud-task/docs/current/reference[html]. The latest copy is available at https://docs.spring.io/spring-cloud-task/reference/. Copies of this document may be made for your own use and for distribution to others, provided that you do not charge any fee for such copies and further provided that each copy contains this Copyright Notice, whether distributed in print or electronically. [[task-documentation-getting-help]] == Getting help Having trouble with Spring Cloud Task? We would like to help! * Ask a question. We monitor https://stackoverflow.com[stackoverflow.com] for questions tagged with https://stackoverflow.com/tags/spring-cloud-task[`spring-cloud-task`]. * Report bugs with Spring Cloud Task at https://github.com/spring-cloud/spring-cloud-task/issues. NOTE: All of Spring Cloud Task is open source, including the documentation. If you find a problem with the docs or if you just want to improve them, please {github-code}[get involved]. [[task-documentation-first-steps]] == First Steps If you are just getting started with Spring Cloud Task or with 'Spring' in general, we suggest reading the xref:getting-started.adoc[Getting started] chapter. 
To get started from scratch, read the following sections: * xref:getting-started.adoc#getting-started-introducing-spring-cloud-task[Introducing Spring Cloud Task] * xref:getting-started.adoc#getting-started-system-requirements[System Requirements] + To follow the tutorial, read xref:getting-started.adoc#getting-started-developing-first-task[Developing Your First Spring Cloud Task Application] + To run your example, read xref:getting-started.adoc#getting-started-running-the-example[Running the Example] ================================================ FILE: docs/modules/ROOT/pages/stream.adoc ================================================ [[stream-integration]] = Spring Cloud Stream Integration [[partintro]] -- A task by itself can be useful, but integration of a task into a larger ecosystem lets it be useful for more complex processing and orchestration. This section covers the integration options for Spring Cloud Task with Spring Cloud Stream. -- [[stream-integration-events]] == Spring Cloud Task Events Spring Cloud Task provides the ability to emit events through a Spring Cloud Stream channel when the task is run through a Spring Cloud Stream channel. A task listener is used to publish the `TaskExecution` on a message channel named `task-events`. This feature is autowired into any task that has `spring-cloud-stream`, `spring-cloud-stream-<binder>`, and a defined task on its classpath. NOTE: To disable the event emitting listener, set the `spring.cloud.task.events.enabled` property to `false`. 
With the appropriate classpath defined, the following task emits the `TaskExecution` as an event on the `task-events` channel (at both the start and the end of the task): [source, java] ---- @SpringBootApplication public class TaskEventsApplication { public static void main(String[] args) { SpringApplication.run(TaskEventsApplication.class, args); } @Configuration public static class TaskConfiguration { @Bean public ApplicationRunner applicationRunner() { return new ApplicationRunner() { @Override public void run(ApplicationArguments args) { System.out.println("The ApplicationRunner was executed"); } }; } } } ---- NOTE: A binder implementation is also required to be on the classpath. NOTE: A sample task event application can be found in the samples module of the Spring Cloud Task Project, https://github.com/spring-cloud/spring-cloud-task/tree/master/spring-cloud-task-samples/task-events[here]. [[stream-integration-disable-task-events]] === Disabling Specific Task Events To disable task events, you can set the `spring.cloud.task.events.enabled` property to `false`. [[stream-integration-batch-events]] == Spring Batch Events When executing a Spring Batch job through a task, Spring Cloud Task can be configured to emit informational messages based on the Spring Batch listeners available in Spring Batch. 
Specifically, the following Spring Batch listeners are autoconfigured into each batch job and emit messages on the associated Spring Cloud Stream channels when run through Spring Cloud Task: * `JobExecutionListener` listens for `job-execution-events` * `StepExecutionListener` listens for `step-execution-events` * `ChunkListener` listens for `chunk-events` * `ItemReadListener` listens for `item-read-events` * `ItemProcessListener` listens for `item-process-events` * `ItemWriteListener` listens for `item-write-events` * `SkipListener` listens for `skip-events` These listeners are autoconfigured into any `AbstractJob` when the appropriate beans (a `Job` and a `TaskLifecycleListener`) exist in the context. Configuration to listen to these events is handled the same way binding to any other Spring Cloud Stream channel is done. Our task (the one running the batch job) serves as a `Source`, with the listening applications serving as either a `Processor` or a `Sink`. An example could be to have an application listening to the `job-execution-events` channel for the start and stop of a job. To configure the listening application, you would configure the input to be `job-execution-events` as follows: `spring.cloud.stream.bindings.input.destination=job-execution-events` NOTE: A binder implementation is also required to be on the classpath. NOTE: A sample batch event application can be found in the samples module of the Spring Cloud Task Project, https://github.com/spring-cloud/spring-cloud-task/tree/master/spring-cloud-task-samples/batch-events[here]. [[sending-batch-events-to-different-channels]] === Sending Batch Events to Different Channels One of the options that Spring Cloud Task offers for batch events is the ability to alter the channel to which a specific listener can emit its messages. To do so, use the following configuration: `spring.cloud.stream.bindings.<channel>.destination=<new destination>`. 
For example, if `StepExecutionListener` needs to emit its messages to another channel called `my-step-execution-events` instead of the default `step-execution-events`, you can add the following configuration: `spring.cloud.task.batch.events.step-execution-events-binding-name=my-step-execution-events` [[disabling-batch-events]] === Disabling Batch Events To disable the listener functionality for all batch events, use the following configuration: `spring.cloud.task.batch.events.enabled=false` To disable a specific batch event, use the following configuration: `spring.cloud.task.batch.events.<batch event>.enabled=false`. The following listing shows individual listeners that you can disable: [source,bash] ---- spring.cloud.task.batch.events.job-execution.enabled=false spring.cloud.task.batch.events.step-execution.enabled=false spring.cloud.task.batch.events.chunk.enabled=false spring.cloud.task.batch.events.item-read.enabled=false spring.cloud.task.batch.events.item-process.enabled=false spring.cloud.task.batch.events.item-write.enabled=false spring.cloud.task.batch.events.skip.enabled=false ---- [[emit-order-for-batch-events]] === Emit Order for Batch Events By default, batch events have `Ordered.LOWEST_PRECEDENCE`. 
To change this value (for example, to 5 ), use the following configuration: [source,bash] ---- spring.cloud.task.batch.events.job-execution-order=5 spring.cloud.task.batch.events.step-execution-order=5 spring.cloud.task.batch.events.chunk-order=5 spring.cloud.task.batch.events.item-read-order=5 spring.cloud.task.batch.events.item-process-order=5 spring.cloud.task.batch.events.item-write-order=5 spring.cloud.task.batch.events.skip-order=5 ---- ================================================ FILE: docs/modules/ROOT/partials/_configprops.adoc ================================================ |=== |Name | Default | Description |spring.cloud.task.batch.application-runner-order | `+++0+++` | The order for the {@code ApplicationRunner} used to run batch jobs when {@code spring.cloud.task.batch.fail-on-job-failure=true}. Defaults to 0 (same as the {@link org.springframework.boot.batch.autoconfigure.JobLauncherApplicationRunner}). |spring.cloud.task.batch.command-line-runner-order | | |spring.cloud.task.batch.events.chunk-event-binding-name | `+++chunk-events+++` | |spring.cloud.task.batch.events.chunk-order | | Establishes the default {@link Ordered} precedence for {@link org.springframework.batch.core.ChunkListener}. |spring.cloud.task.batch.events.chunk.enabled | `+++true+++` | This property is used to determine if a task should listen for batch chunk events. |spring.cloud.task.batch.events.enabled | `+++true+++` | This property is used to determine if a task should listen for batch events. |spring.cloud.task.batch.events.item-process-event-binding-name | `+++item-process-events+++` | |spring.cloud.task.batch.events.item-process-order | | Establishes the default {@link Ordered} precedence for {@link org.springframework.batch.core.ItemProcessListener}. |spring.cloud.task.batch.events.item-process.enabled | `+++true+++` | This property is used to determine if a task should listen for batch item processed events. 
|spring.cloud.task.batch.events.item-read-event-binding-name | `+++item-read-events+++` | |spring.cloud.task.batch.events.item-read-order | | Establishes the default {@link Ordered} precedence for {@link org.springframework.batch.core.ItemReadListener}. |spring.cloud.task.batch.events.item-read.enabled | `+++true+++` | This property is used to determine if a task should listen for batch item read events. |spring.cloud.task.batch.events.item-write-event-binding-name | `+++item-write-events+++` | |spring.cloud.task.batch.events.item-write-order | | Establishes the default {@link Ordered} precedence for {@link org.springframework.batch.core.ItemWriteListener}. |spring.cloud.task.batch.events.item-write.enabled | `+++true+++` | This property is used to determine if a task should listen for batch item write events. |spring.cloud.task.batch.events.job-execution-event-binding-name | `+++job-execution-events+++` | |spring.cloud.task.batch.events.job-execution-order | | Establishes the default {@link Ordered} precedence for {@link org.springframework.batch.core.JobExecutionListener}. |spring.cloud.task.batch.events.job-execution.enabled | `+++true+++` | This property is used to determine if a task should listen for batch job execution events. |spring.cloud.task.batch.events.skip-event-binding-name | `+++skip-events+++` | |spring.cloud.task.batch.events.skip-order | | Establishes the default {@link Ordered} precedence for {@link org.springframework.batch.core.SkipListener}. |spring.cloud.task.batch.events.skip.enabled | `+++true+++` | This property is used to determine if a task should listen for batch skip events. |spring.cloud.task.batch.events.step-execution-event-binding-name | `+++step-execution-events+++` | |spring.cloud.task.batch.events.step-execution-order | | Establishes the default {@link Ordered} precedence for {@link org.springframework.batch.core.StepExecutionListener}. 
|spring.cloud.task.batch.events.step-execution.enabled | `+++true+++` | This property is used to determine if a task should listen for batch step execution events. |spring.cloud.task.batch.events.task-event-binding-name | `+++task-events+++` | |spring.cloud.task.batch.fail-on-job-failure | `+++false+++` | This property is used to determine if a task app should return with a non zero exit code if a batch job fails. |spring.cloud.task.batch.fail-on-job-failure-poll-interval | `+++5000+++` | Fixed delay in milliseconds that Spring Cloud Task will wait when checking if {@link org.springframework.batch.core.JobExecution}s have completed, when spring.cloud.task.batch.failOnJobFailure is set to true. Defaults to 5000. |spring.cloud.task.batch.job-names | | Comma-separated list of job names to execute on startup (for instance, `job1,job2`). By default, all Jobs found in the context are executed. @deprecated use spring.batch.job.name instead of spring.cloud.task.batch.jobNames. |spring.cloud.task.batch.listener.enabled | `+++true+++` | This property is used to determine if a task will be linked to the batch jobs that are run. |spring.cloud.task.closecontext-enabled | `+++false+++` | When set to true the context is closed at the end of the task. Else the context remains open. |spring.cloud.task.events.enabled | `+++true+++` | This property is used to determine if a task app should emit task events. |spring.cloud.task.executionid | | An id that will be used by the task when updating the task execution. |spring.cloud.task.external-execution-id | | An id that can be associated with a task. |spring.cloud.task.initialize-enabled | | If set to true then tables are initialized. If set to false tables are not initialized. Defaults to null. The requirement for it to be defaulted to null is so that we can support the spring.cloud.task.initialize.enable until it is removed. 
|spring.cloud.task.parent-execution-id | | The id of the parent task execution id that launched this task execution. Defaults to null if task execution had no parent. |spring.cloud.task.single-instance-enabled | `+++false+++` | This property is used to determine if a task will execute if another task with the same app name is running. |spring.cloud.task.single-instance-lock-check-interval | `+++500+++` | Declares the time (in millis) that a task execution will wait between checks. Default time is: 500 millis. |spring.cloud.task.single-instance-lock-ttl | | Declares the maximum amount of time (in millis) that a task execution can hold a lock to prevent another task from executing with a specific task name when the single-instance-enabled is set to true. Default time is: Integer.MAX_VALUE. |spring.cloud.task.table-prefix | `+++TASK_+++` | The prefix to append to the table names created by Spring Cloud Task. |spring.cloud.task.transaction-manager | `+++springCloudTaskTransactionManager+++` | This property is used to specify the transaction manager for TaskRepository. By default, a dedicated transaction manager is created by spring. |=== ================================================ FILE: docs/modules/ROOT/partials/_conventions.adoc ================================================ [[observability-conventions]] === Observability - Conventions Below you can find a list of all `GlobalObservationConvention` and `ObservationConvention` declared by this project. 
.ObservationConvention implementations |=== |ObservationConvention Class Name | Applicable ObservationContext Class Name |`org.springframework.cloud.task.listener.DefaultTaskExecutionObservationConvention`|`TaskExecutionObservationContext` |`org.springframework.cloud.task.listener.TaskExecutionObservationConvention`|`TaskExecutionObservationContext` |`org.springframework.cloud.task.configuration.observation.DefaultTaskObservationConvention`|`TaskObservationContext` |`org.springframework.cloud.task.configuration.observation.TaskObservationConvention`|`TaskObservationContext` |=== ================================================ FILE: docs/modules/ROOT/partials/_metrics.adoc ================================================ [[observability-metrics]] === Observability - Metrics Below you can find a list of all metrics declared by this project. [[observability-metrics-task-active]] ==== Task Active ____ Metrics created around a task execution. ____ **Metric name** `spring.cloud.task` (defined by convention class `org.springframework.cloud.task.listener.DefaultTaskExecutionObservationConvention`). **Type** `timer`. **Metric name** `spring.cloud.task.active` (defined by convention class `org.springframework.cloud.task.listener.DefaultTaskExecutionObservationConvention`). **Type** `long task timer`. IMPORTANT: KeyValues that are added after starting the Observation might be missing from the *.active metrics. IMPORTANT: Micrometer internally uses `nanoseconds` for the baseunit. However, each backend determines the actual baseunit. (i.e. Prometheus uses seconds) Fully qualified name of the enclosing class `org.springframework.cloud.task.listener.TaskExecutionObservation`. IMPORTANT: All tags must be prefixed with `spring.cloud.task` prefix! .Low cardinality Keys [cols="a,a"] |=== |Name | Description |`spring.cloud.task.cf.app.id` _(required)_|App id for CF cloud. |`spring.cloud.task.cf.app.name` _(required)_|App name for CF cloud. 
|`spring.cloud.task.cf.app.version` _(required)_|App version for CF cloud. |`spring.cloud.task.cf.instance.index` _(required)_|Instance index for CF cloud. |`spring.cloud.task.cf.org.name` _(required)_|Organization Name for CF cloud. |`spring.cloud.task.cf.space.id` _(required)_|Space id for CF cloud. |`spring.cloud.task.cf.space.name` _(required)_|Space name for CF cloud. |`spring.cloud.task.execution.id` _(required)_|Task execution id. |`spring.cloud.task.exit.code` _(required)_|Task exit code. |`spring.cloud.task.external.execution.id` _(required)_|External execution id for task. |`spring.cloud.task.name` _(required)_|Task name measurement. |`spring.cloud.task.parent.execution.id` _(required)_|Task parent execution id. |`spring.cloud.task.status` _(required)_|task status. Can be either success or failure. |=== [[observability-metrics-task-runner-observation]] ==== Task Runner Observation ____ Observation created when a task runner is executed. ____ **Metric name** `spring.cloud.task.runner` (defined by convention class `org.springframework.cloud.task.configuration.observation.DefaultTaskObservationConvention`). **Type** `timer`. **Metric name** `spring.cloud.task.runner.active` (defined by convention class `org.springframework.cloud.task.configuration.observation.DefaultTaskObservationConvention`). **Type** `long task timer`. IMPORTANT: KeyValues that are added after starting the Observation might be missing from the *.active metrics. IMPORTANT: Micrometer internally uses `nanoseconds` for the baseunit. However, each backend determines the actual baseunit. (i.e. Prometheus uses seconds) Fully qualified name of the enclosing class `org.springframework.cloud.task.configuration.observation.TaskDocumentedObservation`. IMPORTANT: All tags must be prefixed with `spring.cloud.task` prefix! .Low cardinality Keys [cols="a,a"] |=== |Name | Description |`spring.cloud.task.runner.bean-name` _(required)_|Name of the bean that was executed by Spring Cloud Task. 
|=== ================================================ FILE: docs/modules/ROOT/partials/_spans.adoc ================================================ [[observability-spans]] === Observability - Spans Below you can find a list of all spans declared by this project. [[observability-spans-task-active]] ==== Task Active Span > Metrics created around a task execution. **Span name** `spring.cloud.task` (defined by convention class `org.springframework.cloud.task.listener.DefaultTaskExecutionObservationConvention`). Fully qualified name of the enclosing class `org.springframework.cloud.task.listener.TaskExecutionObservation`. IMPORTANT: All tags must be prefixed with `spring.cloud.task` prefix! .Tag Keys |=== |Name | Description |`spring.cloud.task.cf.app.id` _(required)_|App id for CF cloud. |`spring.cloud.task.cf.app.name` _(required)_|App name for CF cloud. |`spring.cloud.task.cf.app.version` _(required)_|App version for CF cloud. |`spring.cloud.task.cf.instance.index` _(required)_|Instance index for CF cloud. |`spring.cloud.task.cf.org.name` _(required)_|Organization Name for CF cloud. |`spring.cloud.task.cf.space.id` _(required)_|Space id for CF cloud. |`spring.cloud.task.cf.space.name` _(required)_|Space name for CF cloud. |`spring.cloud.task.execution.id` _(required)_|Task execution id. |`spring.cloud.task.exit.code` _(required)_|Task exit code. |`spring.cloud.task.external.execution.id` _(required)_|External execution id for task. |`spring.cloud.task.name` _(required)_|Task name measurement. |`spring.cloud.task.parent.execution.id` _(required)_|Task parent execution id. |`spring.cloud.task.status` _(required)_|task status. Can be either success or failure. |=== [[observability-spans-task-runner-observation]] ==== Task Runner Observation Span > Observation created when a task runner is executed. **Span name** `spring.cloud.task.runner` (defined by convention class `org.springframework.cloud.task.configuration.observation.DefaultTaskObservationConvention`). 
Fully qualified name of the enclosing class `org.springframework.cloud.task.configuration.observation.TaskDocumentedObservation`. IMPORTANT: All tags must be prefixed with `spring.cloud.task` prefix! .Tag Keys |=== |Name | Description |`spring.cloud.task.runner.bean-name` _(required)_|Name of the bean that was executed by Spring Cloud Task. |=== ================================================ FILE: docs/package.json ================================================ { "dependencies": { "antora": "3.2.0-alpha.9", "@antora/atlas-extension": "1.0.0-alpha.5", "@antora/collector-extension": "1.0.2", "@asciidoctor/tabs": "1.0.0-beta.6", "@springio/antora-extensions": "1.14.7", "@springio/asciidoctor-extensions": "1.0.0-alpha.17" } } ================================================ FILE: docs/pom.xml ================================================ 4.0.0 org.springframework.cloud spring-cloud-task-docs org.springframework.cloud spring-cloud-task-parent 5.0.2-SNAPSHOT jar Spring Cloud Task Docs Spring Cloud Task Docs spring-cloud-task ${basedir}/.. 
spring.cloud.* none 1.0.2 ${maven.multiModuleProjectDirectory}/spring-cloud-task-core/ .* ${maven.multiModuleProjectDirectory}/docs/modules/ROOT/partials/ src/main/asciidoc enable-configuration-properties !disableConfigurationProperties ${project.groupId} spring-cloud-starter-single-step-batch-job ${project.version} ${project.groupId} spring-cloud-starter-task ${project.version} docs src/main/antora/resources/antora-resources true pl.project13.maven git-commit-id-plugin org.apache.maven.plugins maven-dependency-plugin org.codehaus.mojo exec-maven-plugin generate-observability-docs ${generate-docs.phase} java io.micrometer.docs.DocsGeneratorCommand true ${micrometer-docs-generator.inputPath} ${micrometer-docs-generator.inclusionPattern} ${micrometer-docs-generator.outputPath} io.micrometer micrometer-docs-generator ${micrometer-docs-generator.version} jar io.spring.maven.antora antora-component-version-maven-plugin org.antora antora-maven-plugin org.apache.maven.plugins maven-antrun-plugin copying-javadocs prepare-package run maven-deploy-plugin ================================================ FILE: docs/src/main/antora/resources/antora-resources/antora.yml ================================================ version: @antora-component.version@ prerelease: @antora-component.prerelease@ asciidoc: attributes: attribute-missing: 'warn' chomp: 'all' project-root: @maven.multiModuleProjectDirectory@ github-repo: @docs.main@ github-raw: https://raw.githubusercontent.com/spring-cloud/@docs.main@/@github-tag@ github-code: https://github.com/spring-cloud/@docs.main@/tree/@github-tag@ github-issues: https://github.com/spring-cloud/@docs.main@/issues/ github-wiki: https://github.com/spring-cloud/@docs.main@/wiki spring-cloud-version: @project.version@ github-tag: @github-tag@ version-type: @version-type@ docs-url: https://docs.spring.io/@docs.main@/docs/@project.version@ raw-docs-url: https://raw.githubusercontent.com/spring-cloud/@docs.main@/@github-tag@ project-version: 
@project.version@ project-name: @docs.main@ ================================================ FILE: docs/src/main/asciidoc/.gitignore ================================================ *.html *.css ================================================ FILE: docs/src/main/asciidoc/Guardfile ================================================ require 'asciidoctor' require 'erb' guard 'shell' do watch(/.*\.adoc$/) {|m| Asciidoctor.render_file('index.adoc', \ :in_place => true, \ :safe => Asciidoctor::SafeMode::UNSAFE, \ :attributes => {\ 'source-highlighter' => 'prettify', \ 'icons' => 'font', \ 'linkcss' => 'true', \ 'copycss' => 'true', \ 'doctype' => 'book'}) } end guard 'livereload' do watch(%r{^.+\.(css|js|html)$}) end ================================================ FILE: docs/src/main/asciidoc/README.adoc ================================================ [[spring-cloud-task]] = Spring Cloud Task Is a project centered around the idea of processing on demand. A user is able to develop a “task” that can be deployed, executed and removed on demand, yet the result of the process persists beyond the life of the task for future reporting. [[requirements:]] == Requirements: * Java 17 or Above [[build-main-project:]] == Build Main Project: [source,shell,indent=2] ---- $ ./mvnw clean install ---- [[example:]] == Example: [source,java,indent=2] ---- @SpringBootApplication @EnableTask public class MyApp { @Bean public MyTaskApplication myTask() { return new MyTaskApplication(); } public static void main(String[] args) { SpringApplication.run(MyApp.class); } public static class MyTaskApplication implements ApplicationRunner { @Override public void run(ApplicationArguments args) throws Exception { System.out.println("Hello World"); } } } ---- [[code-of-conduct]] == Code of Conduct This project adheres to the Contributor Covenant link:CODE_OF_CONDUCT.adoc[code of conduct]. By participating, you are expected to uphold this code. 
Please report unacceptable behavior to spring-code-of-conduct@pivotal.io. [[building-the-project]] == Building the Project This project requires that you invoke the Javadoc engine from the Maven command line. You can do so by appending `javadoc:aggregate` to the rest of your Maven command. For example, to build the entire project, you could use `mvn clean install -DskipTests -P docs`. ================================================ FILE: docs/src/main/asciidoc/index.htmladoc ================================================ include::spring-cloud-task.adoc[] ================================================ FILE: docs/src/main/asciidoc/index.htmlsingleadoc ================================================ = Spring Cloud Task Reference Guide Michael Minella, Glenn Renfro, Jay Bryant include::_attributes.adoc[] // ====================================================================================== (C) 2009-2020 VMware, Inc. All rights reserved. Copies of this document may be made for your own use and for distribution to others, provided that you do not charge any fee for such copies and further provided that each copy contains this Copyright Notice, whether distributed in print or electronically. 
include::preface.adoc[leveloffset=+1] include::getting-started.adoc[leveloffset=+1] include::features.adoc[leveloffset=+1] include::batch.adoc[leveloffset=+1] include::batch-starter.adoc[leveloffset=+1] include::stream.adoc[leveloffset=+1] include::appendix.adoc[leveloffset=+1] // ====================================================================================== ================================================ FILE: docs/src/main/asciidoc/index.pdfadoc ================================================ include::spring-cloud-task.pdfadoc[] ================================================ FILE: docs/src/main/asciidoc/sagan-index.adoc ================================================ Spring Cloud Task allows a user to develop and run short lived microservices using Spring Cloud and run them locally, in the cloud, even on Spring Cloud Data Flow. Just add `@EnableTask` and run your app as a Spring Boot app (single application context). If you are new to Spring Cloud Task, take a look at our https://docs.spring.io/spring-cloud-task/docs/2.0.0.RELEASE/reference/htmlsingle/#getting-started[Getting Started] docs. ================================================ FILE: docs/src/main/asciidoc/spring-cloud-task.epubadoc ================================================ = Spring Cloud Task Reference Guide Michael Minella, Glenn Renfro, Jay Bryant include::_attributes.adoc[] // ====================================================================================== (C) 2009-2022 VMware, Inc. All rights reserved. Copies of this document may be made for your own use and for distribution to others, provided that you do not charge any fee for such copies and further provided that each copy contains this Copyright Notice, whether distributed in print or electronically. 
include::preface.adoc[leveloffset=+1] include::getting-started.adoc[leveloffset=+1] include::features.adoc[leveloffset=+1] include::batch.adoc[leveloffset=+1] include::batch-starter.adoc[leveloffset=+1] include::stream.adoc[leveloffset=+1] include::appendix.adoc[leveloffset=+1] // ====================================================================================== ================================================ FILE: docs/src/main/asciidoc/spring-cloud-task.htmlsingleadoc ================================================ = Spring Cloud Task Reference Guide Michael Minella, Glenn Renfro, Jay Bryant include::_attributes.adoc[] // ====================================================================================== (C) 2009-2020 VMware, Inc. All rights reserved. Copies of this document may be made for your own use and for distribution to others, provided that you do not charge any fee for such copies and further provided that each copy contains this Copyright Notice, whether distributed in print or electronically. include::preface.adoc[leveloffset=+1] include::getting-started.adoc[leveloffset=+1] include::features.adoc[leveloffset=+1] include::batch.adoc[leveloffset=+1] include::batch-starter.adoc[leveloffset=+1] include::stream.adoc[leveloffset=+1] include::appendix.adoc[leveloffset=+1] // ====================================================================================== ================================================ FILE: docs/src/main/asciidoc/spring-cloud-task.pdfadoc ================================================ = Spring Cloud Task Reference Guide Michael Minella, Glenn Renfro, Jay Bryant include::_attributes.adoc[] // ====================================================================================== (C) 2009-2022 VMware, Inc. All rights reserved. 
Copies of this document may be made for your own use and for distribution to others, provided that you do not charge any fee for such copies and further provided that each copy contains this Copyright Notice, whether distributed in print or electronically. include::preface.adoc[leveloffset=+1] include::getting-started.adoc[leveloffset=+1] include::features.adoc[leveloffset=+1] include::batch.adoc[leveloffset=+1] include::batch-starter.adoc[leveloffset=+1] include::stream.adoc[leveloffset=+1] include::appendix.adoc[leveloffset=+1] // ====================================================================================== ================================================ FILE: docs/src/main/javadoc/spring-javadoc.css ================================================ /* Javadoc style sheet */ /* Overall document style */ @import url('resources/fonts/dejavu.css'); body { background-color: #ffffff; color: #353833; font-family: 'DejaVu Sans', Arial, Helvetica, sans-serif; font-size: 14px; margin: 0; } a:link, a:visited { text-decoration: none; color: #4A6782; } a:hover, a:focus { text-decoration: none; color: #bb7a2a; } a:active { text-decoration: none; color: #4A6782; } a[name] { color: #353833; } a[name]:hover { text-decoration: none; color: #353833; } pre { font-family: 'DejaVu Sans Mono', monospace; font-size: 14px; } h1 { font-size: 20px; } h2 { font-size: 18px; } h3 { font-size: 16px; font-style: italic; } h4 { font-size: 13px; } h5 { font-size: 12px; } h6 { font-size: 11px; } ul { list-style-type: disc; } code, tt { font-family: 'DejaVu Sans Mono', monospace; font-size: 14px; padding-top: 4px; margin-top: 8px; line-height: 1.4em; } dt code { font-family: 'DejaVu Sans Mono', monospace; font-size: 14px; padding-top: 4px; } table tr td dt code { font-family: 'DejaVu Sans Mono', monospace; font-size: 14px; vertical-align: top; padding-top: 4px; } sup { font-size: 8px; } /* Document title and Copyright styles */ .clear { clear: both; height: 0px; overflow: hidden; } 
.aboutLanguage { float: right; padding: 0px 21px; font-size: 11px; z-index: 200; margin-top: -9px; } .legalCopy { margin-left: .5em; } .bar a, .bar a:link, .bar a:visited, .bar a:active { color: #FFFFFF; text-decoration: none; } .bar a:hover, .bar a:focus { color: #bb7a2a; } .tab { background-color: #0066FF; color: #ffffff; padding: 8px; width: 5em; font-weight: bold; } /* Navigation bar styles */ .bar { background-color: #4D7A97; color: #FFFFFF; padding: .8em .5em .4em .8em; height: auto; /*height:1.8em;*/ font-size: 11px; margin: 0; } .topNav { background-color: #4D7A97; color: #FFFFFF; float: left; padding: 0; width: 100%; clear: right; height: 2.8em; padding-top: 10px; overflow: hidden; font-size: 12px; } .bottomNav { margin-top: 10px; background-color: #4D7A97; color: #FFFFFF; float: left; padding: 0; width: 100%; clear: right; height: 2.8em; padding-top: 10px; overflow: hidden; font-size: 12px; } .subNav { background-color: #dee3e9; float: left; width: 100%; overflow: hidden; font-size: 12px; } .subNav div { clear: left; float: left; padding: 0 0 5px 6px; text-transform: uppercase; } ul.navList, ul.subNavList { float: left; margin: 0 25px 0 0; padding: 0; } ul.navList li { list-style: none; float: left; padding: 5px 6px; text-transform: uppercase; } ul.subNavList li { list-style: none; float: left; } .topNav a:link, .topNav a:active, .topNav a:visited, .bottomNav a:link, .bottomNav a:active, .bottomNav a:visited { color: #FFFFFF; text-decoration: none; text-transform: uppercase; } .topNav a:hover, .bottomNav a:hover { text-decoration: none; color: #bb7a2a; text-transform: uppercase; } .navBarCell1Rev { background-color: #F8981D; color: #253441; margin: auto 5px; } .skipNav { position: absolute; top: auto; left: -9999px; overflow: hidden; } /* Page header and footer styles */ .header, .footer { clear: both; margin: 0 20px; padding: 5px 0 0 0; } .indexHeader { margin: 10px; position: relative; } .indexHeader span { margin-right: 15px; } .indexHeader h1 { 
font-size: 13px; } .title { color: #2c4557; margin: 10px 0; } .subTitle { margin: 5px 0 0 0; } .header ul { margin: 0 0 15px 0; padding: 0; } .footer ul { margin: 20px 0 5px 0; } .header ul li, .footer ul li { list-style: none; font-size: 13px; } /* Heading styles */ div.details ul.blockList ul.blockList ul.blockList li.blockList h4, div.details ul.blockList ul.blockList ul.blockListLast li.blockList h4 { background-color: #dee3e9; border: 1px solid #d0d9e0; margin: 0 0 6px -8px; padding: 7px 5px; } ul.blockList ul.blockList ul.blockList li.blockList h3 { background-color: #dee3e9; border: 1px solid #d0d9e0; margin: 0 0 6px -8px; padding: 7px 5px; } ul.blockList ul.blockList li.blockList h3 { padding: 0; margin: 15px 0; } ul.blockList li.blockList h2 { padding: 0px 0 20px 0; } /* Page layout container styles */ .contentContainer, .sourceContainer, .classUseContainer, .serializedFormContainer, .constantValuesContainer { clear: both; padding: 10px 20px; position: relative; } .indexContainer { margin: 10px; position: relative; font-size: 12px; } .indexContainer h2 { font-size: 13px; padding: 0 0 3px 0; } .indexContainer ul { margin: 0; padding: 0; } .indexContainer ul li { list-style: none; padding-top: 2px; } .contentContainer .description dl dt, .contentContainer .details dl dt, .serializedFormContainer dl dt { font-size: 12px; font-weight: bold; margin: 10px 0 0 0; color: #4E4E4E; } .contentContainer .description dl dd, .contentContainer .details dl dd, .serializedFormContainer dl dd { margin: 5px 0 10px 0px; font-size: 14px; font-family: 'DejaVu Sans Mono', monospace; } .serializedFormContainer dl.nameValue dt { margin-left: 1px; font-size: 1.1em; display: inline; font-weight: bold; } .serializedFormContainer dl.nameValue dd { margin: 0 0 0 1px; font-size: 1.1em; display: inline; } /* List styles */ ul.horizontal li { display: inline; font-size: 0.9em; } ul.inheritance { margin: 0; padding: 0; } ul.inheritance li { display: inline; list-style: none; } 
ul.inheritance li ul.inheritance { margin-left: 15px; padding-left: 15px; padding-top: 1px; } ul.blockList, ul.blockListLast { margin: 10px 0 10px 0; padding: 0; } ul.blockList li.blockList, ul.blockListLast li.blockList { list-style: none; margin-bottom: 15px; line-height: 1.4; } ul.blockList ul.blockList li.blockList, ul.blockList ul.blockListLast li.blockList { padding: 0px 20px 5px 10px; border: 1px solid #ededed; background-color: #f8f8f8; } ul.blockList ul.blockList ul.blockList li.blockList, ul.blockList ul.blockList ul.blockListLast li.blockList { padding: 0 0 5px 8px; background-color: #ffffff; border: none; } ul.blockList ul.blockList ul.blockList ul.blockList li.blockList { margin-left: 0; padding-left: 0; padding-bottom: 15px; border: none; } ul.blockList ul.blockList ul.blockList ul.blockList li.blockListLast { list-style: none; border-bottom: none; padding-bottom: 0; } table tr td dl, table tr td dl dt, table tr td dl dd { margin-top: 0; margin-bottom: 1px; } /* Table styles */ .overviewSummary, .memberSummary, .typeSummary, .useSummary, .constantsSummary, .deprecatedSummary { width: 100%; border-left: 1px solid #EEE; border-right: 1px solid #EEE; border-bottom: 1px solid #EEE; } .overviewSummary, .memberSummary { padding: 0px; } .overviewSummary caption, .memberSummary caption, .typeSummary caption, .useSummary caption, .constantsSummary caption, .deprecatedSummary caption { position: relative; text-align: left; background-repeat: no-repeat; color: #253441; font-weight: bold; clear: none; overflow: hidden; padding: 0px; padding-top: 10px; padding-left: 1px; margin: 0px; white-space: pre; } .overviewSummary caption a:link, .memberSummary caption a:link, .typeSummary caption a:link, .useSummary caption a:link, .constantsSummary caption a:link, .deprecatedSummary caption a:link, .overviewSummary caption a:hover, .memberSummary caption a:hover, .typeSummary caption a:hover, .useSummary caption a:hover, .constantsSummary caption a:hover, 
.deprecatedSummary caption a:hover, .overviewSummary caption a:active, .memberSummary caption a:active, .typeSummary caption a:active, .useSummary caption a:active, .constantsSummary caption a:active, .deprecatedSummary caption a:active, .overviewSummary caption a:visited, .memberSummary caption a:visited, .typeSummary caption a:visited, .useSummary caption a:visited, .constantsSummary caption a:visited, .deprecatedSummary caption a:visited { color: #FFFFFF; } .overviewSummary caption span, .memberSummary caption span, .typeSummary caption span, .useSummary caption span, .constantsSummary caption span, .deprecatedSummary caption span { white-space: nowrap; padding-top: 5px; padding-left: 12px; padding-right: 12px; padding-bottom: 7px; display: inline-block; float: left; background-color: #F8981D; border: none; height: 16px; } .memberSummary caption span.activeTableTab span { white-space: nowrap; padding-top: 5px; padding-left: 12px; padding-right: 12px; margin-right: 3px; display: inline-block; float: left; background-color: #F8981D; height: 16px; } .memberSummary caption span.tableTab span { white-space: nowrap; padding-top: 5px; padding-left: 12px; padding-right: 12px; margin-right: 3px; display: inline-block; float: left; background-color: #4D7A97; height: 16px; } .memberSummary caption span.tableTab, .memberSummary caption span.activeTableTab { padding-top: 0px; padding-left: 0px; padding-right: 0px; background-image: none; float: none; display: inline; } .overviewSummary .tabEnd, .memberSummary .tabEnd, .typeSummary .tabEnd, .useSummary .tabEnd, .constantsSummary .tabEnd, .deprecatedSummary .tabEnd { display: none; width: 5px; position: relative; float: left; background-color: #F8981D; } .memberSummary .activeTableTab .tabEnd { display: none; width: 5px; margin-right: 3px; position: relative; float: left; background-color: #F8981D; } .memberSummary .tableTab .tabEnd { display: none; width: 5px; margin-right: 3px; position: relative; background-color: #4D7A97; 
float: left; } .overviewSummary td, .memberSummary td, .typeSummary td, .useSummary td, .constantsSummary td, .deprecatedSummary td { text-align: left; padding: 0px 0px 12px 10px; width: 100%; } th.colOne, th.colFirst, th.colLast, .useSummary th, .constantsSummary th, td.colOne, td.colFirst, td.colLast, .useSummary td, .constantsSummary td { vertical-align: top; padding-right: 0px; padding-top: 8px; padding-bottom: 3px; } th.colFirst, th.colLast, th.colOne, .constantsSummary th { background: #dee3e9; text-align: left; padding: 8px 3px 3px 7px; } td.colFirst, th.colFirst { white-space: nowrap; font-size: 13px; } td.colLast, th.colLast { font-size: 13px; } td.colOne, th.colOne { font-size: 13px; } .overviewSummary td.colFirst, .overviewSummary th.colFirst, .overviewSummary td.colOne, .overviewSummary th.colOne, .memberSummary td.colFirst, .memberSummary th.colFirst, .memberSummary td.colOne, .memberSummary th.colOne, .typeSummary td.colFirst { width: 25%; vertical-align: top; } td.colOne a:link, td.colOne a:active, td.colOne a:visited, td.colOne a:hover, td.colFirst a:link, td.colFirst a:active, td.colFirst a:visited, td.colFirst a:hover, td.colLast a:link, td.colLast a:active, td.colLast a:visited, td.colLast a:hover, .constantValuesContainer td a:link, .constantValuesContainer td a:active, .constantValuesContainer td a:visited, .constantValuesContainer td a:hover { font-weight: bold; } .tableSubHeadingColor { background-color: #EEEEFF; } .altColor { background-color: #FFFFFF; } .rowColor { background-color: #EEEEEF; } /* Content styles */ .description pre { margin-top: 0; } .deprecatedContent { margin: 0; padding: 10px 0; } .docSummary { padding: 0; } ul.blockList ul.blockList ul.blockList li.blockList h3 { font-style: normal; } div.block { font-size: 14px; font-family: 'DejaVu Serif', Georgia, "Times New Roman", Times, serif; } td.colLast div { padding-top: 0px; } td.colLast a { padding-bottom: 3px; } /* Formatting effect styles */ .sourceLineNo { color: green; 
padding: 0 30px 0 0; } h1.hidden { visibility: hidden; overflow: hidden; font-size: 10px; } .block { display: block; margin: 3px 10px 2px 0px; color: #474747; } .deprecatedLabel, .descfrmTypeLabel, .memberNameLabel, .memberNameLink, .overrideSpecifyLabel, .packageHierarchyLabel, .paramLabel, .returnLabel, .seeLabel, .simpleTagLabel, .throwsLabel, .typeNameLabel, .typeNameLink { font-weight: bold; } .deprecationComment, .emphasizedPhrase, .interfaceName { font-style: italic; } div.block div.block span.deprecationComment, div.block div.block span.emphasizedPhrase, div.block div.block span.interfaceName { font-style: normal; } div.contentContainer ul.blockList li.blockList h2 { padding-bottom: 0px; } /* Spring */ pre.code { background-color: #F8F8F8; border: 1px solid #CCCCCC; border-radius: 3px 3px 3px 3px; overflow: auto; padding: 10px; margin: 4px 20px 2px 0px; } pre.code code, pre.code code * { font-size: 1em; } pre.code code, pre.code code * { padding: 0 !important; margin: 0 !important; } ================================================ FILE: mvnw ================================================ #!/bin/sh # ---------------------------------------------------------------------------- # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
# ---------------------------------------------------------------------------- # ---------------------------------------------------------------------------- # Apache Maven Wrapper startup batch script, version 3.2.0 # # Required ENV vars: # ------------------ # JAVA_HOME - location of a JDK home dir # # Optional ENV vars # ----------------- # MAVEN_OPTS - parameters passed to the Java VM when running Maven # e.g. to debug Maven itself, use # set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 # MAVEN_SKIP_RC - flag to disable loading of mavenrc files # ---------------------------------------------------------------------------- if [ -z "$MAVEN_SKIP_RC" ] ; then if [ -f /usr/local/etc/mavenrc ] ; then . /usr/local/etc/mavenrc fi if [ -f /etc/mavenrc ] ; then . /etc/mavenrc fi if [ -f "$HOME/.mavenrc" ] ; then . "$HOME/.mavenrc" fi fi # OS specific support. $var _must_ be set to either true or false. cygwin=false; darwin=false; mingw=false case "$(uname)" in CYGWIN*) cygwin=true ;; MINGW*) mingw=true;; Darwin*) darwin=true # Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home # See https://developer.apple.com/library/mac/qa/qa1170/_index.html if [ -z "$JAVA_HOME" ]; then if [ -x "/usr/libexec/java_home" ]; then JAVA_HOME="$(/usr/libexec/java_home)"; export JAVA_HOME else JAVA_HOME="/Library/Java/Home"; export JAVA_HOME fi fi ;; esac if [ -z "$JAVA_HOME" ] ; then if [ -r /etc/gentoo-release ] ; then JAVA_HOME=$(java-config --jre-home) fi fi # For Cygwin, ensure paths are in UNIX format before anything is touched if $cygwin ; then [ -n "$JAVA_HOME" ] && JAVA_HOME=$(cygpath --unix "$JAVA_HOME") [ -n "$CLASSPATH" ] && CLASSPATH=$(cygpath --path --unix "$CLASSPATH") fi # For Mingw, ensure paths are in UNIX format before anything is touched if $mingw ; then [ -n "$JAVA_HOME" ] && [ -d "$JAVA_HOME" ] && JAVA_HOME="$(cd "$JAVA_HOME" || (echo "cannot cd into $JAVA_HOME."; exit 1); pwd)" fi if [ -z "$JAVA_HOME" 
]; then javaExecutable="$(which javac)" if [ -n "$javaExecutable" ] && ! [ "$(expr "\"$javaExecutable\"" : '\([^ ]*\)')" = "no" ]; then # readlink(1) is not available as standard on Solaris 10. readLink=$(which readlink) if [ ! "$(expr "$readLink" : '\([^ ]*\)')" = "no" ]; then if $darwin ; then javaHome="$(dirname "\"$javaExecutable\"")" javaExecutable="$(cd "\"$javaHome\"" && pwd -P)/javac" else javaExecutable="$(readlink -f "\"$javaExecutable\"")" fi javaHome="$(dirname "\"$javaExecutable\"")" javaHome=$(expr "$javaHome" : '\(.*\)/bin') JAVA_HOME="$javaHome" export JAVA_HOME fi fi fi if [ -z "$JAVACMD" ] ; then if [ -n "$JAVA_HOME" ] ; then if [ -x "$JAVA_HOME/jre/sh/java" ] ; then # IBM's JDK on AIX uses strange locations for the executables JAVACMD="$JAVA_HOME/jre/sh/java" else JAVACMD="$JAVA_HOME/bin/java" fi else JAVACMD="$(\unset -f command 2>/dev/null; \command -v java)" fi fi if [ ! -x "$JAVACMD" ] ; then echo "Error: JAVA_HOME is not defined correctly." >&2 echo " We cannot execute $JAVACMD" >&2 exit 1 fi if [ -z "$JAVA_HOME" ] ; then echo "Warning: JAVA_HOME environment variable is not set." fi # traverses directory structure from process work directory to filesystem root # first directory with .mvn subdirectory is considered project base directory find_maven_basedir() { if [ -z "$1" ] then echo "Path not specified to find_maven_basedir" return 1 fi basedir="$1" wdir="$1" while [ "$wdir" != '/' ] ; do if [ -d "$wdir"/.mvn ] ; then basedir=$wdir break fi # workaround for JBEAP-8937 (on Solaris 10/Sparc) if [ -d "${wdir}" ]; then wdir=$(cd "$wdir/.." || exit 1; pwd) fi # end of workaround done printf '%s' "$(cd "$basedir" || exit 1; pwd)" } # concatenates all lines of a file concat_lines() { if [ -f "$1" ]; then # Remove \r in case we run on Windows within Git Bash # and check out the repository with auto CRLF management # enabled. 
Otherwise, we may read lines that are delimited with # \r\n and produce $'-Xarg\r' rather than -Xarg due to word # splitting rules. tr -s '\r\n' ' ' < "$1" fi } log() { if [ "$MVNW_VERBOSE" = true ]; then printf '%s\n' "$1" fi } BASE_DIR=$(find_maven_basedir "$(dirname "$0")") if [ -z "$BASE_DIR" ]; then exit 1; fi MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"}; export MAVEN_PROJECTBASEDIR log "$MAVEN_PROJECTBASEDIR" ########################################################################################## # Extension to allow automatically downloading the maven-wrapper.jar from Maven-central # This allows using the maven wrapper in projects that prohibit checking in binary data. ########################################################################################## wrapperJarPath="$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" if [ -r "$wrapperJarPath" ]; then log "Found $wrapperJarPath" else log "Couldn't find $wrapperJarPath, downloading it ..." if [ -n "$MVNW_REPOURL" ]; then wrapperUrl="$MVNW_REPOURL/org/apache/maven/wrapper/maven-wrapper/3.2.0/maven-wrapper-3.2.0.jar" else wrapperUrl="https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.2.0/maven-wrapper-3.2.0.jar" fi while IFS="=" read -r key value; do # Remove '\r' from value to allow usage on windows as IFS does not consider '\r' as a separator ( considers space, tab, new line ('\n'), and custom '=' ) safeValue=$(echo "$value" | tr -d '\r') case "$key" in (wrapperUrl) wrapperUrl="$safeValue"; break ;; esac done < "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.properties" log "Downloading from: $wrapperUrl" if $cygwin; then wrapperJarPath=$(cygpath --path --windows "$wrapperJarPath") fi if command -v wget > /dev/null; then log "Found wget ... 
using wget" [ "$MVNW_VERBOSE" = true ] && QUIET="" || QUIET="--quiet" if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then wget $QUIET "$wrapperUrl" -O "$wrapperJarPath" || rm -f "$wrapperJarPath" else wget $QUIET --http-user="$MVNW_USERNAME" --http-password="$MVNW_PASSWORD" "$wrapperUrl" -O "$wrapperJarPath" || rm -f "$wrapperJarPath" fi elif command -v curl > /dev/null; then log "Found curl ... using curl" [ "$MVNW_VERBOSE" = true ] && QUIET="" || QUIET="--silent" if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then curl $QUIET -o "$wrapperJarPath" "$wrapperUrl" -f -L || rm -f "$wrapperJarPath" else curl $QUIET --user "$MVNW_USERNAME:$MVNW_PASSWORD" -o "$wrapperJarPath" "$wrapperUrl" -f -L || rm -f "$wrapperJarPath" fi else log "Falling back to using Java to download" javaSource="$MAVEN_PROJECTBASEDIR/.mvn/wrapper/MavenWrapperDownloader.java" javaClass="$MAVEN_PROJECTBASEDIR/.mvn/wrapper/MavenWrapperDownloader.class" # For Cygwin, switch paths to Windows format before running javac if $cygwin; then javaSource=$(cygpath --path --windows "$javaSource") javaClass=$(cygpath --path --windows "$javaClass") fi if [ -e "$javaSource" ]; then if [ ! -e "$javaClass" ]; then log " - Compiling MavenWrapperDownloader.java ..." ("$JAVA_HOME/bin/javac" "$javaSource") fi if [ -e "$javaClass" ]; then log " - Running MavenWrapperDownloader.java ..." 
("$JAVA_HOME/bin/java" -cp .mvn/wrapper MavenWrapperDownloader "$wrapperUrl" "$wrapperJarPath") || rm -f "$wrapperJarPath" fi fi fi fi ########################################################################################## # End of extension ########################################################################################## # If specified, validate the SHA-256 sum of the Maven wrapper jar file wrapperSha256Sum="" while IFS="=" read -r key value; do case "$key" in (wrapperSha256Sum) wrapperSha256Sum=$value; break ;; esac done < "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.properties" if [ -n "$wrapperSha256Sum" ]; then wrapperSha256Result=false if command -v sha256sum > /dev/null; then if echo "$wrapperSha256Sum $wrapperJarPath" | sha256sum -c > /dev/null 2>&1; then wrapperSha256Result=true fi elif command -v shasum > /dev/null; then if echo "$wrapperSha256Sum $wrapperJarPath" | shasum -a 256 -c > /dev/null 2>&1; then wrapperSha256Result=true fi else echo "Checksum validation was requested but neither 'sha256sum' or 'shasum' are available." echo "Please install either command, or disable validation by removing 'wrapperSha256Sum' from your maven-wrapper.properties." exit 1 fi if [ $wrapperSha256Result = false ]; then echo "Error: Failed to validate Maven wrapper SHA-256, your Maven wrapper might be compromised." >&2 echo "Investigate or delete $wrapperJarPath to attempt a clean download." >&2 echo "If you updated your Maven version, you need to update the specified wrapperSha256Sum property." 
>&2 exit 1 fi fi MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS" # For Cygwin, switch paths to Windows format before running java if $cygwin; then [ -n "$JAVA_HOME" ] && JAVA_HOME=$(cygpath --path --windows "$JAVA_HOME") [ -n "$CLASSPATH" ] && CLASSPATH=$(cygpath --path --windows "$CLASSPATH") [ -n "$MAVEN_PROJECTBASEDIR" ] && MAVEN_PROJECTBASEDIR=$(cygpath --path --windows "$MAVEN_PROJECTBASEDIR") fi # Provide a "standardized" way to retrieve the CLI args that will # work with both Windows and non-Windows executions. MAVEN_CMD_LINE_ARGS="$MAVEN_CONFIG $*" export MAVEN_CMD_LINE_ARGS WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain # shellcheck disable=SC2086 # safe args exec "$JAVACMD" \ $MAVEN_OPTS \ $MAVEN_DEBUG_OPTS \ -classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \ "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \ ${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@" ================================================ FILE: mvnw.cmd ================================================ @REM ---------------------------------------------------------------------------- @REM Licensed to the Apache Software Foundation (ASF) under one @REM or more contributor license agreements. See the NOTICE file @REM distributed with this work for additional information @REM regarding copyright ownership. The ASF licenses this file @REM to you under the Apache License, Version 2.0 (the @REM "License"); you may not use this file except in compliance @REM with the License. You may obtain a copy of the License at @REM @REM http://www.apache.org/licenses/LICENSE-2.0 @REM @REM Unless required by applicable law or agreed to in writing, @REM software distributed under the License is distributed on an @REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY @REM KIND, either express or implied. See the License for the @REM specific language governing permissions and limitations @REM under the License. 
@REM ----------------------------------------------------------------------------

@REM ----------------------------------------------------------------------------
@REM Apache Maven Wrapper startup batch script, version 3.2.0
@REM
@REM Required ENV vars:
@REM JAVA_HOME - location of a JDK home dir
@REM
@REM Optional ENV vars
@REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands
@REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a keystroke before ending
@REM MAVEN_OPTS - parameters passed to the Java VM when running Maven
@REM     e.g. to debug Maven itself, use
@REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
@REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files
@REM ----------------------------------------------------------------------------

@REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on'
@echo off
@REM set title of command window
title %0
@REM enable echoing by setting MAVEN_BATCH_ECHO to 'on'
@if "%MAVEN_BATCH_ECHO%" == "on"  echo %MAVEN_BATCH_ECHO%

@REM set %HOME% to equivalent of $HOME
if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%")

@REM Execute a user defined script before this one
if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre
@REM check for pre script, once with legacy .bat ending and once with .cmd ending
if exist "%USERPROFILE%\mavenrc_pre.bat" call "%USERPROFILE%\mavenrc_pre.bat" %*
if exist "%USERPROFILE%\mavenrc_pre.cmd" call "%USERPROFILE%\mavenrc_pre.cmd" %*
:skipRcPre

@setlocal

set ERROR_CODE=0

@REM To isolate internal variables from possible post scripts, we use another setlocal
@setlocal

@REM ==== START VALIDATION ====
if not "%JAVA_HOME%" == "" goto OkJHome

echo.
echo Error: JAVA_HOME not found in your environment. >&2
echo Please set the JAVA_HOME variable in your environment to match the >&2
echo location of your Java installation. >&2
echo.
goto error

:OkJHome
if exist "%JAVA_HOME%\bin\java.exe" goto init

echo.
echo Error: JAVA_HOME is set to an invalid directory. >&2
echo JAVA_HOME = "%JAVA_HOME%" >&2
echo Please set the JAVA_HOME variable in your environment to match the >&2
echo location of your Java installation. >&2
echo.
goto error

@REM ==== END VALIDATION ====

:init

@REM Find the project base dir, i.e. the directory that contains the folder ".mvn".
@REM Fallback to current working directory if not found.

set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR%
IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir

set EXEC_DIR=%CD%
set WDIR=%EXEC_DIR%
:findBaseDir
IF EXIST "%WDIR%"\.mvn goto baseDirFound
cd ..
IF "%WDIR%"=="%CD%" goto baseDirNotFound
set WDIR=%CD%
goto findBaseDir

:baseDirFound
set MAVEN_PROJECTBASEDIR=%WDIR%
cd "%EXEC_DIR%"
goto endDetectBaseDir

:baseDirNotFound
set MAVEN_PROJECTBASEDIR=%EXEC_DIR%
cd "%EXEC_DIR%"

:endDetectBaseDir

IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig

@setlocal EnableExtensions EnableDelayedExpansion
for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a
@endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS%

:endReadAdditionalConfig

SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe"
set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar"
set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain

set WRAPPER_URL="https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.2.0/maven-wrapper-3.2.0.jar"

FOR /F "usebackq tokens=1,2 delims==" %%A IN ("%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties") DO (
    IF "%%A"=="wrapperUrl" SET WRAPPER_URL=%%B
)

@REM Extension to allow automatically downloading the maven-wrapper.jar from Maven-central
@REM This allows using the maven wrapper in projects that prohibit checking in binary data.
if exist %WRAPPER_JAR% (
    if "%MVNW_VERBOSE%" == "true" (
        echo Found %WRAPPER_JAR%
    )
) else (
    if not "%MVNW_REPOURL%" == "" (
        SET WRAPPER_URL="%MVNW_REPOURL%/org/apache/maven/wrapper/maven-wrapper/3.2.0/maven-wrapper-3.2.0.jar"
    )
    if "%MVNW_VERBOSE%" == "true" (
        echo Couldn't find %WRAPPER_JAR%, downloading it ...
        echo Downloading from: %WRAPPER_URL%
    )

    powershell -Command "&{"^
        "$webclient = new-object System.Net.WebClient;"^
        "if (-not ([string]::IsNullOrEmpty('%MVNW_USERNAME%') -and [string]::IsNullOrEmpty('%MVNW_PASSWORD%'))) {"^
        "$webclient.Credentials = new-object System.Net.NetworkCredential('%MVNW_USERNAME%', '%MVNW_PASSWORD%');"^
        "}"^
        "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12; $webclient.DownloadFile('%WRAPPER_URL%', '%WRAPPER_JAR%')"^
        "}"
    if "%MVNW_VERBOSE%" == "true" (
        echo Finished downloading %WRAPPER_JAR%
    )
)
@REM End of extension

@REM If specified, validate the SHA-256 sum of the Maven wrapper jar file
SET WRAPPER_SHA_256_SUM=""
FOR /F "usebackq tokens=1,2 delims==" %%A IN ("%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties") DO (
    IF "%%A"=="wrapperSha256Sum" SET WRAPPER_SHA_256_SUM=%%B
)
IF NOT %WRAPPER_SHA_256_SUM%=="" (
    powershell -Command "&{"^
       "$hash = (Get-FileHash \"%WRAPPER_JAR%\" -Algorithm SHA256).Hash.ToLower();"^
       "If('%WRAPPER_SHA_256_SUM%' -ne $hash){"^
       "  Write-Output 'Error: Failed to validate Maven wrapper SHA-256, your Maven wrapper might be compromised.';"^
       "  Write-Output 'Investigate or delete %WRAPPER_JAR% to attempt a clean download.';"^
       "  Write-Output 'If you updated your Maven version, you need to update the specified wrapperSha256Sum property.';"^
       "  exit 1;"^
       "}"^
       "}"
    if ERRORLEVEL 1 goto error
)

@REM Provide a "standardized" way to retrieve the CLI args that will
@REM work with both Windows and non-Windows executions.
set MAVEN_CMD_LINE_ARGS=%*

%MAVEN_JAVA_EXE% ^
  %JVM_CONFIG_MAVEN_PROPS% ^
  %MAVEN_OPTS% ^
  %MAVEN_DEBUG_OPTS% ^
  -classpath %WRAPPER_JAR% ^
  "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" ^
  %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %*
if ERRORLEVEL 1 goto error
goto end

:error
set ERROR_CODE=1

:end
@endlocal & set ERROR_CODE=%ERROR_CODE%

if not "%MAVEN_SKIP_RC%"=="" goto skipRcPost
@REM check for post script, once with legacy .bat ending and once with .cmd ending
if exist "%USERPROFILE%\mavenrc_post.bat" call "%USERPROFILE%\mavenrc_post.bat"
if exist "%USERPROFILE%\mavenrc_post.cmd" call "%USERPROFILE%\mavenrc_post.cmd"
:skipRcPost

@REM pause the script if MAVEN_BATCH_PAUSE is set to 'on'
if "%MAVEN_BATCH_PAUSE%"=="on" pause

if "%MAVEN_TERMINATE_CMD%"=="on" exit %ERROR_CODE%

cmd /C exit /B %ERROR_CODE%

================================================
FILE: pom.xml
================================================
4.0.0 org.springframework.cloud spring-cloud-build 5.0.2-SNAPSHOT org.springframework.cloud spring-cloud-task-parent 5.0.2-SNAPSHOT pom Spring Cloud Task Build Spring Cloud Task Build https://cloud.spring.io/spring-cloud-task/ VMware, Inc. https://www.spring.io Apache License, Version 2.0 https://www.apache.org/licenses/LICENSE-2.0 https://github.com/spring-cloud/spring-cloud-task mminella Michael Minella mminella at vmware.com VMware, Inc. https://www.spring.io Project Lead cppwfs Glenn Renfro grenfro at vmware VMware, Inc.
https://www.spring.io 3.2.1 org.springframework.cloud spring-cloud-task-dependencies ${project.version} pom import org.springframework.cloud spring-cloud-starter-stream-rabbit ${spring-cloud-stream-binder-rabbit.version} org.springframework.cloud spring-cloud-stream ${spring-cloud-stream.version} spring-docs https://docs.spring.io/spring-cloud-release/docs/${pom.version}/reference/html/ https://github.com/spring-cloud/spring-cloud-release spring-cloud-task-dependencies spring-cloud-task-core spring-cloud-task-batch spring-cloud-task-stream spring-cloud-starter-task spring-cloud-task-samples spring-cloud-task-integration-tests docs spring-cloud-starter-single-step-batch-job 5.0.2-SNAPSHOT ${spring-cloud-stream.version} 11.0.0 UTF-8 ${project.build.directory}/coverage-reports/jacoco-ut.exec true true true 17 0.0.40 org.codehaus.mojo flatten-maven-plugin org.codehaus.mojo flatten-maven-plugin org.apache.maven.plugins maven-surefire-plugin **/*Tests.java ${surefireArgLine} org.apache.maven.plugins maven-compiler-plugin 17 17 17 17 org.apache.maven.plugins maven-javadoc-plugin 17 all,-missing attach-javadocs prepare-package jar aggregate prepare-package aggregate org.apache.maven.plugins maven-source-plugin attach-sources jar org.jacoco jacoco-maven-plugin pre-unit-test prepare-agent ${project.build.directory}/coverage-reports/jacoco-ut.exec surefireArgLine post-unit-test test report ${project.build.directory}/coverage-reports/jacoco-ut.exec ${project.reporting.outputDirectory}/jacoco-ut 0.8.13 org.apache.maven.plugins maven-checkstyle-plugin io.spring.javaformat spring-javaformat-maven-plugin ${spring-javaformat-maven-plugin.version} validate true validate org.apache.maven.plugins maven-checkstyle-plugin central org.sonatype.central central-publishing-maven-plugin spring-cloud-task-integration-tests spring spring-snapshots Spring Snapshots https://repo.spring.io/snapshot true spring-milestones Spring Milestones https://repo.spring.io/milestone false spring-releases 
Spring Releases https://repo.spring.io/release false spring-snapshots Spring Snapshots https://repo.spring.io/snapshot true spring-milestones Spring Milestones https://repo.spring.io/milestone false spring-releases Spring Releases https://repo.spring.io/release false withoutDockerTests maven-surefire-plugin DockerRequired ================================================ FILE: spring-cloud-starter-single-step-batch-job/pom.xml ================================================ spring-cloud-task-parent org.springframework.cloud 5.0.2-SNAPSHOT 4.0.0 spring-cloud-starter-single-step-batch-job spring-cloud-starter-single-step-batch-job 5.0.2-SNAPSHOT org.springframework.cloud spring-cloud-task-core org.springframework.boot spring-boot-jdbc org.springframework.boot spring-boot-starter-batch org.springframework.batch spring-batch-infrastructure org.springframework.boot spring-boot-configuration-processor true org.springframework.boot spring-boot-configuration-processor true ${spring-boot.version} org.springframework.boot spring-boot-starter-test test org.springframework.boot spring-boot-starter-jdbc org.springframework.batch spring-batch-test test com.h2database h2 test org.springframework.boot spring-boot-amqp true org.springframework.amqp spring-rabbit org.testcontainers testcontainers test org.testcontainers testcontainers-rabbitmq test tools.jackson.core jackson-core tools.jackson.core jackson-databind org.junit.jupiter junit-jupiter test org.junit.jupiter junit-jupiter-engine test org.junit.jupiter junit-jupiter-params test org.springframework.kafka spring-kafka-test test org.springframework.boot spring-boot-kafka org.junit.jupiter junit-jupiter-api test org.junit.platform junit-platform-launcher test org.springframework.cloud spring-cloud-test-support ${spring-cloud-commons.version} test ================================================ FILE: spring-cloud-starter-single-step-batch-job/src/main/java/org/springframework/cloud/task/batch/autoconfigure/RangeConverter.java 
================================================
/*
 * Copyright 2020-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.cloud.task.batch.autoconfigure;

import org.springframework.batch.infrastructure.item.file.transform.Range;
import org.springframework.core.convert.converter.Converter;

/**
 * Converter for taking properties of format {@code start-end} or {@code start} (where
 * start and end are both integers) and converting them into {@link Range} instances for
 * configuring a
 * {@link org.springframework.batch.infrastructure.item.file.FlatFileItemReader}.
 *
 * @author Michael Minella
 * @since 2.3
 */
public class RangeConverter implements Converter<String, Range> {

	/**
	 * Parse a {@code start} or {@code start-end} string into a {@link Range}.
	 * @param source the raw property value; may be {@code null}
	 * @return the parsed {@link Range}, or {@code null} when {@code source} is null
	 * @throws IllegalArgumentException if more than one {@code -} separator is present
	 * @throws NumberFormatException if a bound is not a valid integer
	 */
	@Override
	public Range convert(String source) {
		if (source == null) {
			return null;
		}

		String[] columns = source.split("-");

		if (columns.length == 1) {
			int start = Integer.parseInt(columns[0]);
			return new Range(start);
		}
		else if (columns.length == 2) {
			int start = Integer.parseInt(columns[0]);
			int end = Integer.parseInt(columns[1]);
			return new Range(start, end);
		}
		else {
			throw new IllegalArgumentException(String
				.format("%s is in an illegal format. Ranges must be specified as startIndex-endIndex", source));
		}
	}

}

================================================
FILE: spring-cloud-starter-single-step-batch-job/src/main/java/org/springframework/cloud/task/batch/autoconfigure/SingleStepJobAutoConfiguration.java
================================================
/*
 * Copyright 2019-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.cloud.task.batch.autoconfigure;

import java.util.Map;

import org.springframework.batch.core.job.Job;
import org.springframework.batch.core.job.builder.JobBuilder;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.core.step.Step;
import org.springframework.batch.core.step.builder.SimpleStepBuilder;
import org.springframework.batch.core.step.builder.StepBuilder;
import org.springframework.batch.infrastructure.item.ItemProcessor;
import org.springframework.batch.infrastructure.item.ItemReader;
import org.springframework.batch.infrastructure.item.ItemWriter;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.AutoConfiguration;
import org.springframework.boot.autoconfigure.AutoConfigureBefore;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.batch.autoconfigure.BatchAutoConfiguration;
import
org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.util.Assert;

/**
 * Autoconfiguration to create a single step Spring Batch Job.
 *
 * @author Michael Minella
 * @since 2.3
 */
@AutoConfiguration
@EnableConfigurationProperties(SingleStepJobProperties.class)
@AutoConfigureBefore(BatchAutoConfiguration.class)
public class SingleStepJobAutoConfiguration {

	private SingleStepJobProperties properties;

	@Autowired
	PlatformTransactionManager transactionManager;

	@Autowired
	JobRepository jobRepository;

	// Optional: when no processor bean is present, items pass through unprocessed.
	@Autowired(required = false)
	private ItemProcessor<Map<String, Object>, Map<String, Object>> itemProcessor;

	public SingleStepJobAutoConfiguration(SingleStepJobProperties properties, ApplicationContext context) {
		validateProperties(properties);
		this.properties = properties;
	}

	// Fail fast at context startup if the job cannot be built from the properties.
	private void validateProperties(SingleStepJobProperties properties) {
		Assert.hasText(properties.getJobName(), "A job name is required");
		Assert.hasText(properties.getStepName(), "A step name is required");
		Assert.notNull(properties.getChunkSize(), "A chunk size is required");
		Assert.isTrue(properties.getChunkSize() > 0, "A chunk size greater than zero is required");
	}

	/**
	 * Build the single-step, chunk-oriented {@link Job} from the configured reader and
	 * writer beans.
	 * @param itemReader the reader supplying {@code Map<String, Object>} items
	 * @param itemWriter the writer consuming {@code Map<String, Object>} items
	 * @return the assembled {@link Job}
	 */
	@Bean
	@ConditionalOnMissingBean
	@ConditionalOnProperty(prefix = "spring.batch.job", name = "job-name")
	public Job job(ItemReader<Map<String, Object>> itemReader, ItemWriter<Map<String, Object>> itemWriter) {
		SimpleStepBuilder<Map<String, Object>, Map<String, Object>> stepBuilder = new StepBuilder(
				this.properties.getStepName(), this.jobRepository)
			.<Map<String, Object>, Map<String, Object>>chunk(this.properties.getChunkSize(), this.transactionManager)
			.reader(itemReader);

		stepBuilder.processor(this.itemProcessor);

		Step step = stepBuilder.writer(itemWriter).build();

		return new JobBuilder(this.properties.getJobName(), this.jobRepository).start(step).build();
	}

}

================================================
FILE:
spring-cloud-starter-single-step-batch-job/src/main/java/org/springframework/cloud/task/batch/autoconfigure/SingleStepJobProperties.java
================================================
/*
 * Copyright 2019-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.cloud.task.batch.autoconfigure;

import org.springframework.boot.context.properties.ConfigurationProperties;

/**
 * Properties to configure the step and job level properties for a single step job.
 *
 * @author Michael Minella
 * @since 2.3
 */
@ConfigurationProperties(prefix = "spring.batch.job")
public class SingleStepJobProperties {

	/**
	 * Name of the step in the single step job.
	 */
	private String stepName;

	/**
	 * The number of items to process per transaction or chunk.
	 */
	private Integer chunkSize;

	/**
	 * The name of the job.
	 */
	private String jobName;

	/**
	 * Name of the step in the single step job.
	 * @return name
	 */
	public String getStepName() {
		return stepName;
	}

	/**
	 * Set the name of the step.
	 * @param stepName name
	 */
	public void setStepName(String stepName) {
		this.stepName = stepName;
	}

	/**
	 * The number of items to process per transaction/chunk.
	 * @return number of items
	 */
	public Integer getChunkSize() {
		return chunkSize;
	}

	/**
	 * Set the number of items within a transaction/chunk.
	 * @param chunkSize number of items
	 */
	public void setChunkSize(Integer chunkSize) {
		this.chunkSize = chunkSize;
	}

	/**
	 * The name of the job.
	 * @return name
	 */
	public String getJobName() {
		return jobName;
	}

	/**
	 * Set the name of the job.
	 * @param jobName name
	 */
	public void setJobName(String jobName) {
		this.jobName = jobName;
	}

}

================================================
FILE: spring-cloud-starter-single-step-batch-job/src/main/java/org/springframework/cloud/task/batch/autoconfigure/flatfile/FlatFileItemReaderAutoConfiguration.java
================================================
/*
 * Copyright 2019-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*/ package org.springframework.cloud.task.batch.autoconfigure.flatfile; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.springframework.batch.infrastructure.item.file.FlatFileItemReader; import org.springframework.batch.infrastructure.item.file.LineCallbackHandler; import org.springframework.batch.infrastructure.item.file.LineMapper; import org.springframework.batch.infrastructure.item.file.builder.FlatFileItemReaderBuilder; import org.springframework.batch.infrastructure.item.file.mapping.FieldSetMapper; import org.springframework.batch.infrastructure.item.file.separator.RecordSeparatorPolicy; import org.springframework.batch.infrastructure.item.file.transform.FieldSet; import org.springframework.batch.infrastructure.item.file.transform.LineTokenizer; import org.springframework.batch.infrastructure.item.file.transform.Range; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.autoconfigure.AutoConfiguration; import org.springframework.boot.autoconfigure.AutoConfigureAfter; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.boot.batch.autoconfigure.BatchAutoConfiguration; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.cloud.task.batch.autoconfigure.RangeConverter; import org.springframework.context.annotation.Bean; /** * Autconfiguration for a {@code FlatFileItemReader}. 
* * @author Michael Minella * @author Glenn Renfro * @since 2.3 */ @AutoConfiguration @EnableConfigurationProperties(FlatFileItemReaderProperties.class) @AutoConfigureAfter(BatchAutoConfiguration.class) public class FlatFileItemReaderAutoConfiguration { private static final Log logger = LogFactory.getLog(FlatFileItemReaderAutoConfiguration.class); private final FlatFileItemReaderProperties properties; public FlatFileItemReaderAutoConfiguration(FlatFileItemReaderProperties properties) { this.properties = properties; } @Bean @ConditionalOnMissingBean @ConditionalOnProperty(prefix = "spring.batch.job.flatfileitemreader", name = "name") public FlatFileItemReader> itemReader(@Autowired(required = false) LineTokenizer lineTokenizer, @Autowired(required = false) FieldSetMapper> fieldSetMapper, @Autowired(required = false) LineMapper> lineMapper, @Autowired(required = false) LineCallbackHandler skippedLinesCallback, @Autowired(required = false) RecordSeparatorPolicy recordSeparatorPolicy) { FlatFileItemReaderBuilder> mapFlatFileItemReaderBuilder = new FlatFileItemReaderBuilder>() .name(this.properties.getName()) .resource(this.properties.getResource()) .saveState(this.properties.isSaveState()) .maxItemCount(this.properties.getMaxItemCount()) .currentItemCount(this.properties.getCurrentItemCount()) .strict(this.properties.isStrict()) .encoding(this.properties.getEncoding()) .linesToSkip(this.properties.getLinesToSkip()) .comments(this.properties.getComments().toArray(new String[this.properties.getComments().size()])); if (recordSeparatorPolicy != null) { mapFlatFileItemReaderBuilder.recordSeparatorPolicy(recordSeparatorPolicy); } mapFlatFileItemReaderBuilder.fieldSetMapper(fieldSetMapper); mapFlatFileItemReaderBuilder.lineMapper(lineMapper); mapFlatFileItemReaderBuilder.skippedLinesCallback(skippedLinesCallback); if (this.properties.isDelimited()) { mapFlatFileItemReaderBuilder.delimited() .quoteCharacter(this.properties.getQuoteCharacter()) 
.delimiter(this.properties.getDelimiter()) .includedFields(this.properties.getIncludedFields().toArray(new Integer[0])) .names(this.properties.getNames()) .beanMapperStrict(this.properties.isParsingStrict()) .fieldSetMapper(new MapFieldSetMapper()); } else if (this.properties.isFixedLength()) { RangeConverter rangeConverter = new RangeConverter(); List ranges = new ArrayList<>(); this.properties.getRanges().forEach(range -> ranges.add(rangeConverter.convert(range))); mapFlatFileItemReaderBuilder.fixedLength() .columns(ranges.toArray(new Range[0])) .names(this.properties.getNames()) .fieldSetMapper(new MapFieldSetMapper()) .beanMapperStrict(this.properties.isParsingStrict()); } else { mapFlatFileItemReaderBuilder.lineTokenizer(lineTokenizer); } if (lineTokenizer != null && (this.properties.isDelimited() || this.properties.isFixedLength())) { logger.warn("Custom LineTokenizer bean provided but will be ignored because " + "delimited or fixed-length properties are configured. " + "Remove the custom bean or clear delimited/fixedLength properties."); } return mapFlatFileItemReaderBuilder.build(); } /** * A {@link FieldSetMapper} that takes a {@code FieldSet} and returns the * {@code Map} of its contents. */ public static class MapFieldSetMapper implements FieldSetMapper> { @Override public Map mapFieldSet(FieldSet fieldSet) { Map map = new HashMap<>(); fieldSet.getProperties().forEach((key, value) -> map.put(key.toString(), value)); return map; } } } ================================================ FILE: spring-cloud-starter-single-step-batch-job/src/main/java/org/springframework/cloud/task/batch/autoconfigure/flatfile/FlatFileItemReaderProperties.java ================================================ /* * Copyright 2019-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.batch.autoconfigure.flatfile; import java.util.ArrayList; import java.util.List; import org.springframework.batch.infrastructure.item.file.FlatFileItemReader; import org.springframework.batch.infrastructure.item.file.transform.DelimitedLineTokenizer; import org.springframework.batch.infrastructure.item.file.transform.Range; import org.springframework.boot.context.properties.ConfigurationProperties; import org.springframework.core.io.Resource; /** * Properties to configure a {@code FlatFileItemReader}. * * @author Michael Minella * @since 2.3 */ @ConfigurationProperties(prefix = "spring.batch.job.flatfileitemreader") public class FlatFileItemReaderProperties { /** * Determines whether the state of the reader is persisted. Default is {@code true}. */ private boolean saveState = true; /** * The name used to calculate the key within the * {@link org.springframework.batch.infrastructure.item.ExecutionContext}. Required if * {@link #setSaveState} is set to {@code true}. */ private String name; /** * Configure the maximum number of items to be read. */ private int maxItemCount = Integer.MAX_VALUE; /** * Index for the current item. Also used on restarts to indicate where to start from. */ private int currentItemCount = 0; /** * A list of {@code String} elements used to indicate which records are comments. */ private List comments = new ArrayList<>(); /** * The {@link Resource} to be used as input. 
*/ private Resource resource; /** * Configure whether the reader should be in strict mode (require the input * {@link Resource} to exist). */ private boolean strict = true; /** * Configure the encoding used by the reader to read the input source. The default * value is {@link FlatFileItemReader#DEFAULT_CHARSET}. */ private String encoding = FlatFileItemReader.DEFAULT_CHARSET; /** * The number of lines to skip at the beginning of reading the file. */ private int linesToSkip = 0; /** * Indicates that a {@link DelimitedLineTokenizer} should be used to parse each line. */ private boolean delimited = false; /** * Define the delimiter for the file. */ private String delimiter = DelimitedLineTokenizer.DELIMITER_COMMA; /** * Define the character used to quote fields. */ private char quoteCharacter = DelimitedLineTokenizer.DEFAULT_QUOTE_CHARACTER; /** * A list of indices of the fields within a delimited file to be included. */ private List includedFields = new ArrayList<>(); /** * Indicates that a * {@link org.springframework.batch.infrastructure.item.file.transform.FixedLengthTokenizer} * should be used to parse the records in the file. */ private boolean fixedLength = false; /** * The column ranges to be used to parse a fixed width file. */ private List ranges = new ArrayList<>(); /** * The names of the fields to be parsed from the file. */ private String[] names; /** * Indicates whether the number of tokens must match the number of configured fields. */ private boolean parsingStrict = true; /** * Returns the configured value of if the state of the reader will be persisted. * @return true if the state will be persisted */ public boolean isSaveState() { return this.saveState; } /** * Configure if the state of the * {@link org.springframework.batch.infrastructure.item.ItemStreamSupport} should be * persisted within the * {@link org.springframework.batch.infrastructure.item.ExecutionContext} for restart * purposes. 
* @param saveState defaults to true */ public void setSaveState(boolean saveState) { this.saveState = saveState; } /** * Returns the configured value of the name used to calculate {@code ExecutionContext} * keys. * @return the name */ public String getName() { return this.name; } /** * The name used to calculate the key within the * {@link org.springframework.batch.infrastructure.item.ExecutionContext}. Required if * {@link #setSaveState} is set to true. * @param name name of the reader instance * @see org.springframework.batch.infrastructure.item.ItemStreamSupport#setName(String) */ public void setName(String name) { this.name = name; } /** * The maximum number of items to be read. * @return the configured number of items, defaults to Integer.MAX_VALUE */ public int getMaxItemCount() { return this.maxItemCount; } /** * Configure the max number of items to be read. * @param maxItemCount the max items to be read * @see org.springframework.batch.infrastructure.item.support.AbstractItemCountingItemStreamItemReader#setMaxItemCount(int) */ public void setMaxItemCount(int maxItemCount) { this.maxItemCount = maxItemCount; } /** * Provides the index of the current item. * @return item index */ public int getCurrentItemCount() { return this.currentItemCount; } /** * Index for the current item. Also used on restarts to indicate where to start from. * @param currentItemCount current index * @see org.springframework.batch.infrastructure.item.support.AbstractItemCountingItemStreamItemReader#setCurrentItemCount(int) */ public void setCurrentItemCount(int currentItemCount) { this.currentItemCount = currentItemCount; } /** * List of {@code String} values used to indicate what records are comments. * @return list of comment indicators */ public List getComments() { return this.comments; } /** * Takes a list of {@code String} elements used to indicate what records are comments. 
* @param comments strings used to indicate commented lines */ public void setComments(List comments) { this.comments = comments; } /** * The input file for the {@code FlatFileItemReader}. * @return a Resource */ public Resource getResource() { return this.resource; } /** * The {@link Resource} to be used as input. * @param resource the input to the reader. * @see FlatFileItemReader#setResource(Resource) */ public void setResource(Resource resource) { this.resource = resource; } /** * Returns true if a missing input file is considered an error. * @return true if the input file is required. */ public boolean isStrict() { return this.strict; } /** * Configure if the reader should be in strict mode (require the input * {@link Resource} to exist). * @param strict true if the input file is required to exist. * @see FlatFileItemReader#setStrict(boolean) */ public void setStrict(boolean strict) { this.strict = strict; } /** * Returns the encoding for the input file. Defaults to * {@code FlatFileItemReader#DEFAULT_CHARSET}. * @return the configured encoding */ public String getEncoding() { return this.encoding; } /** * Configure the encoding used by the reader to read the input source. Default value * is {@link FlatFileItemReader#DEFAULT_CHARSET}. * @param encoding to use to read the input source. * @see FlatFileItemReader#setEncoding(String) */ public void setEncoding(String encoding) { this.encoding = encoding; } /** * Number of lines to skip when reading the input file. * @return number of lines */ public int getLinesToSkip() { return this.linesToSkip; } /** * The number of lines to skip at the beginning of reading the file. * @param linesToSkip number of lines to be skipped. * @see FlatFileItemReader#setLinesToSkip(int) */ public void setLinesToSkip(int linesToSkip) { this.linesToSkip = linesToSkip; } /** * Indicates if the input file is a delimited file or not. 
* @return true if the file is delimited */ public boolean isDelimited() { return this.delimited; } /** * Indicates that a {@link DelimitedLineTokenizer} should be used to parse each line. * @param delimited true if the file is a delimited file */ public void setDelimited(boolean delimited) { this.delimited = delimited; } /** * The {@code String} used to divide the record into fields. * @return the delimiter */ public String getDelimiter() { return this.delimiter; } /** * Define the delimiter for the file. * @param delimiter String used as a delimiter between fields. * @see DelimitedLineTokenizer#setDelimiter(String) */ public void setDelimiter(String delimiter) { this.delimiter = delimiter; } /** * The char used to indicate that a field is quoted. * @return the quote char */ public char getQuoteCharacter() { return this.quoteCharacter; } /** * Define the character used to quote fields. * @param quoteCharacter char used to define quoted fields * @see DelimitedLineTokenizer#setQuoteCharacter(char) */ public void setQuoteCharacter(char quoteCharacter) { this.quoteCharacter = quoteCharacter; } /** * A {@code List} of indices indicating what fields to include. * @return list of indices */ public List getIncludedFields() { return this.includedFields; } /** * A list of indices of the fields within a delimited file to be included. * @param includedFields indices of the fields * @see DelimitedLineTokenizer#setIncludedFields(int[]) */ public void setIncludedFields(List includedFields) { this.includedFields = includedFields; } /** * Indicates that a file contains records with fixed length columns. * @return true if the file is parsed using column indices */ public boolean isFixedLength() { return this.fixedLength; } /** * Indicates that a * {@link org.springframework.batch.infrastructure.item.file.transform.FixedLengthTokenizer} * should be used to parse the records in the file. 
* @param fixedLength true if the records should be tokenized by column index */ public void setFixedLength(boolean fixedLength) { this.fixedLength = fixedLength; } /** * The column ranges to be used to parsed a fixed width file. * @return a list of {@link Range} instances */ public List getRanges() { return this.ranges; } /** * Column ranges for each field. * @param ranges list of ranges in start-end format (end is optional) */ public void setRanges(List ranges) { this.ranges = ranges; } /** * Names of each column. * @return names */ public String[] getNames() { return this.names; } /** * The names of the fields to be parsed from the file. * @param names names of fields */ public void setNames(String[] names) { this.names = names; } /** * Indicates if the number of tokens must match the number of configured fields. * @return true if they must match */ public boolean isParsingStrict() { return this.parsingStrict; } /** * Indicates if the number of tokens must match the number of configured fields. * @param parsingStrict true if they must match */ public void setParsingStrict(boolean parsingStrict) { this.parsingStrict = parsingStrict; } } ================================================ FILE: spring-cloud-starter-single-step-batch-job/src/main/java/org/springframework/cloud/task/batch/autoconfigure/flatfile/FlatFileItemWriterAutoConfiguration.java ================================================ /* * Copyright 2019-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.batch.autoconfigure.flatfile; import java.util.ArrayList; import java.util.List; import java.util.Map; import org.springframework.batch.infrastructure.item.file.FlatFileFooterCallback; import org.springframework.batch.infrastructure.item.file.FlatFileHeaderCallback; import org.springframework.batch.infrastructure.item.file.FlatFileItemWriter; import org.springframework.batch.infrastructure.item.file.builder.FlatFileItemWriterBuilder; import org.springframework.batch.infrastructure.item.file.transform.FieldExtractor; import org.springframework.batch.infrastructure.item.file.transform.LineAggregator; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.autoconfigure.AutoConfiguration; import org.springframework.boot.autoconfigure.AutoConfigureAfter; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.boot.batch.autoconfigure.BatchAutoConfiguration; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.context.annotation.Bean; import org.springframework.core.io.WritableResource; /** * Autoconfiguration for a {@code FlatFileItemWriter}. 
* * @author Michael Minella * @since 2.3 */ @AutoConfiguration @EnableConfigurationProperties(FlatFileItemWriterProperties.class) @AutoConfigureAfter(BatchAutoConfiguration.class) public class FlatFileItemWriterAutoConfiguration { private FlatFileItemWriterProperties properties; @Autowired(required = false) private LineAggregator> lineAggregator; @Autowired(required = false) private FieldExtractor> fieldExtractor; @Autowired(required = false) private FlatFileHeaderCallback headerCallback; @Autowired(required = false) private FlatFileFooterCallback footerCallback; public FlatFileItemWriterAutoConfiguration(FlatFileItemWriterProperties properties) { this.properties = properties; } @Bean @ConditionalOnMissingBean @ConditionalOnProperty(prefix = "spring.batch.job.flatfileitemwriter", name = "name") public FlatFileItemWriter> itemWriter() { if (this.properties.isDelimited() && this.properties.isFormatted()) { throw new IllegalStateException("An output file must be either delimited or formatted or a custom " + "LineAggregator must be provided. 
Your current configuration specifies both delimited and formatted"); } else if ((this.properties.isFormatted() || this.properties.isDelimited()) && this.lineAggregator != null) { throw new IllegalStateException( "A LineAggregator must be configured if the " + "output is not formatted or delimited"); } FlatFileItemWriterBuilder> builder = new FlatFileItemWriterBuilder>() .name(this.properties.getName()) .resource((WritableResource) this.properties.getResource()) .append(this.properties.isAppend()) .encoding(this.properties.getEncoding()) .forceSync(this.properties.isForceSync()) .lineSeparator(this.properties.getLineSeparator()) .saveState(this.properties.isSaveState()) .shouldDeleteIfEmpty(this.properties.isShouldDeleteIfEmpty()) .shouldDeleteIfExists(this.properties.isShouldDeleteIfExists()) .transactional(this.properties.isTransactional()) .headerCallback(this.headerCallback) .footerCallback(this.footerCallback); if (this.properties.isDelimited()) { FlatFileItemWriterBuilder.DelimitedBuilder> delimitedBuilder = builder.delimited() .delimiter(this.properties.getDelimiter()); if (this.fieldExtractor != null) { delimitedBuilder.fieldExtractor(this.fieldExtractor); } else { delimitedBuilder.fieldExtractor(new MapFieldExtractor(this.properties.getNames())); } } else if (this.properties.isFormatted()) { FlatFileItemWriterBuilder.FormattedBuilder> formattedBuilder = builder.formatted() .format(this.properties.getFormat()) .locale(this.properties.getLocale()) .maximumLength(this.properties.getMaximumLength()) .minimumLength(this.properties.getMinimumLength()); if (this.fieldExtractor != null) { formattedBuilder.fieldExtractor(this.fieldExtractor); } else { formattedBuilder.fieldExtractor(new MapFieldExtractor(this.properties.getNames())); } } else if (this.lineAggregator != null) { builder.lineAggregator(this.lineAggregator); } return builder.build(); } /** * A {@code FieldExtractor} that converts a {@code Map} to the ordered * {@code Object[]} required to populate an 
output record. */ public static class MapFieldExtractor implements FieldExtractor> { private String[] names; public MapFieldExtractor(String[] names) { this.names = names; } @Override public Object[] extract(Map item) { List fields = new ArrayList<>(item.size()); for (String name : this.names) { fields.add(item.get(name)); } return fields.toArray(); } } } ================================================ FILE: spring-cloud-starter-single-step-batch-job/src/main/java/org/springframework/cloud/task/batch/autoconfigure/flatfile/FlatFileItemWriterProperties.java ================================================ /* * Copyright 2019-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.batch.autoconfigure.flatfile; import java.util.Locale; import org.springframework.batch.infrastructure.item.file.FlatFileItemWriter; import org.springframework.boot.context.properties.ConfigurationProperties; import org.springframework.core.io.Resource; /** * Properties for configuring a {@code FlatFileItemWriter}. * * @author Michael Minella * @since 2.3 */ @ConfigurationProperties(prefix = "spring.batch.job.flatfileitemwriter") public class FlatFileItemWriterProperties { /** * The {@link Resource} to be used as output. */ private Resource resource; /** * Configure the use of the {@code DelimitedLineAggregator} to generate the output per * item. Default is {@code false}. 
*/ private boolean delimited; /** * Indicates to use a {@code FormatterLineAggregator} to generate the output per item. * Default is {@code false}. */ private boolean formatted; /** * Configure the format the {@code FormatterLineAggregator} uses for each item. */ private String format; /** * Configure the {@code Locale} to use when generating the output. */ private Locale locale = Locale.getDefault(); /** * Configure the maximum record length. If 0, the size is unbounded. */ private int maximumLength = 0; /** * Configure the minimum record length. */ private int minimumLength = 0; /** * Configure the {@code String} used to delimit the fields in the output file. */ private String delimiter = ","; /** * File encoding for the output file. Defaults to * {@code FlatFileItemWriter.DEFAULT_CHARSET}) */ private String encoding = FlatFileItemWriter.DEFAULT_CHARSET; /** * A flag indicating that changes should be force-synced to disk on flush. Defaults to * {@code false}. */ private boolean forceSync = false; /** * Names of the fields to be extracted into the output. */ private String[] names; /** * Configure if the output file is found if it should be appended to. Defaults to * {@code false}. */ private boolean append = false; /** * String used to separate lines in output. Defaults to the {@code System} property * {@code line.separator}. */ private String lineSeparator = FlatFileItemWriter.DEFAULT_LINE_SEPARATOR; /** * The name used to calculate the key within the * {@link org.springframework.batch.infrastructure.item.ExecutionContext}. Required if * {@link #setSaveState} is set to {@code true}. */ private String name; /** * Returns the configured value of whether the state of the reader is persisted. */ private boolean saveState = true; /** * Indicates whether the output file should be deleted if no output was written to it. * Defaults to {@code false}. 
*/ private boolean shouldDeleteIfEmpty = false; /** * Indicates whether an existing output file should be deleted on startup. Defaults to * {@code true}. */ private boolean shouldDeleteIfExists = true; /** * Indicates whether flushing the buffer should be delayed while a transaction is * active. Defaults to {@code true}. */ private boolean transactional = FlatFileItemWriter.DEFAULT_TRANSACTIONAL; /** * Returns the configured value of if the state of the reader will be persisted. * @return true if the state will be persisted */ public boolean isSaveState() { return this.saveState; } /** * Configure if the state of the * {@link org.springframework.batch.infrastructure.item.ItemStreamSupport} should be * persisted within the * {@link org.springframework.batch.infrastructure.item.ExecutionContext} for restart * purposes. * @param saveState defaults to true */ public void setSaveState(boolean saveState) { this.saveState = saveState; } /** * Returns the configured value of the name used to calculate {@code ExecutionContext} * keys. * @return the name */ public String getName() { return this.name; } /** * The name used to calculate the key within the * {@link org.springframework.batch.infrastructure.item.ExecutionContext}. Required if * {@link #setSaveState} is set to true. * @param name name of the reader instance * @see org.springframework.batch.infrastructure.item.ItemStreamSupport#setName(String) */ public void setName(String name) { this.name = name; } /** * The output file for the {@code FlatFileItemWriter}. * @return a {@code Resource} */ public Resource getResource() { return this.resource; } /** * The {@link Resource} to be used as output. * @param resource the input to the reader. * @see FlatFileItemWriter#setResource */ public void setResource(Resource resource) { this.resource = resource; } /** * Indicates of the output will be delimited by a configured string (, by default). 
* @return true if the output file is a delimited file */ public boolean isDelimited() { return delimited; } /** * Configure the use of the {@code DelimitedLineAggregator} to generate the output per * item. * @param delimited indicator if the file will be delimited or not */ public void setDelimited(boolean delimited) { this.delimited = delimited; } /** * When a file is delimited, this {@code String} will be used as the delimiter between * fields. * @return delimiter */ public String getDelimiter() { return delimiter; } /** * Configure the {@code String} used to delimit the fields in the output file. * @param delimiter {@code String} used to delimit the fields of the output file. */ public void setDelimiter(String delimiter) { this.delimiter = delimiter; } /** * Names of the fields to be extracted into the output. * @return An array of field names */ public String[] getNames() { return names; } /** * Provide an ordered array of field names used to generate the output of a file. * @param names An array of field names */ public void setNames(String[] names) { this.names = names; } /** * True if an output file is found and should be added onto instead of * replaced/deleted. False by default. * @return appending indicator */ public boolean isAppend() { return append; } /** * Configure if the output file is found if it should be appended to. Defaults to * false. * @param append true if the output file should be appended onto if found. */ public void setAppend(boolean append) { this.append = append; } /** * Indicates that the output file will use String formatting to generate the output. * @return true if the file will contain formatted records defaults to true */ public boolean isFormatted() { return formatted; } /** * Indicates to use a {@code FormatterLineAggregator} to generate the output per item. 
* @param formatted true if the output should be formatted via the * {@code FormatterLineAggregator} */ public void setFormatted(boolean formatted) { this.formatted = formatted; } /** * File encoding for the output file. * @return the configured encoding for the output file (Defaults to * {@code FlatFileItemWriter.DEFAULT_CHARSET}) */ public String getEncoding() { return encoding; } /** * Configure encoding of the output file. * @param encoding output encoding */ public void setEncoding(String encoding) { this.encoding = encoding; } /** * A flag indicating that changes should be force-synced to disk on flush. Defaults to * false. * @return The current instance of the builder. */ public boolean isForceSync() { return forceSync; } /** * A flag indicating that changes should be force-synced to disk on flush. Defaults to * false. * @param forceSync value to set the flag to */ public void setForceSync(boolean forceSync) { this.forceSync = forceSync; } /** * String used to separate lines in output. Defaults to the System property * line.separator. * @return the separator string */ public String getLineSeparator() { return lineSeparator; } /** * Configure the {@code String} used to separate each line. * @param lineSeparator defaults to System's line.separator property */ public void setLineSeparator(String lineSeparator) { this.lineSeparator = lineSeparator; } /** * Indicates if the output file should be deleted if no output was written to it. * Defaults to false. * @return true if a file that is empty at the end of the step should be deleted. */ public boolean isShouldDeleteIfEmpty() { return shouldDeleteIfEmpty; } /** * Configure if an empty output file should be deleted once the step is complete. * Defaults to false. * @param shouldDeleteIfEmpty true if the file should be deleted if no items have been * written to it. 
*/ public void setShouldDeleteIfEmpty(boolean shouldDeleteIfEmpty) { this.shouldDeleteIfEmpty = shouldDeleteIfEmpty; } /** * Indicates if an existing output file should be deleted on startup. Defaults to * true. * @return if an existing output file should be deleted. */ public boolean isShouldDeleteIfExists() { return shouldDeleteIfExists; } /** * Configures if an existing output file should be deleted on the start of the step. * Defaults to true. * @param shouldDeleteIfExists if true and an output file of a previous run is found, * it will be deleted. */ public void setShouldDeleteIfExists(boolean shouldDeleteIfExists) { this.shouldDeleteIfExists = shouldDeleteIfExists; } /** * Indicates if flushing the buffer should be delayed while a transaction is active. * Defaults to true. * @return flag indicating if flushing should be delayed during a transaction */ public boolean isTransactional() { return transactional; } /** * Configure if output should not be flushed to disk during an active transaction. * @param transactional defaults to true */ public void setTransactional(boolean transactional) { this.transactional = transactional; } /** * Format used with the {@code FormatterLineAggregator}. * @return the format for each item's output. */ public String getFormat() { return format; } /** * Configure the format the {@code FormatterLineAggregator} will use for each item. * @param format the format for each item's output. */ public void setFormat(String format) { this.format = format; } /** * The {@code Locale} used when generating the output file. * @return configured {@code Locale}. Defaults to {@code Locale.getDefault()} */ public Locale getLocale() { return locale; } /** * Configure the {@code Locale} to use when generating the output. * @param locale the configured {@code Locale} */ public void setLocale(Locale locale) { this.locale = locale; } /** * The longest a record is allowed to be. If 0, the maximum is unlimited. * @return the max record length allowed. 
Defaults to 0. */ public int getMaximumLength() { return maximumLength; } /** * Configure the maximum record length. If 0, the size is unbounded. * @param maximumLength the maximum record length allowed. */ public void setMaximumLength(int maximumLength) { this.maximumLength = maximumLength; } /** * The minimum record length. * @return the minimum record length allowed. */ public int getMinimumLength() { return minimumLength; } /** * Configure the minimum record length. * @param minimumLength the minimum record length. */ public void setMinimumLength(int minimumLength) { this.minimumLength = minimumLength; } } ================================================ FILE: spring-cloud-starter-single-step-batch-job/src/main/java/org/springframework/cloud/task/batch/autoconfigure/flatfile/package-info.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Auto-configuration classes for flat file item readers and writers in single-step batch * jobs. */ package org.springframework.cloud.task.batch.autoconfigure.flatfile; ================================================ FILE: spring-cloud-starter-single-step-batch-job/src/main/java/org/springframework/cloud/task/batch/autoconfigure/jdbc/JDBCSingleStepDataSourceAutoConfiguration.java ================================================ /* * Copyright 2022-present the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.batch.autoconfigure.jdbc; import javax.sql.DataSource; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.boot.context.properties.ConfigurationProperties; import org.springframework.boot.jdbc.autoconfigure.DataSourceProperties; import org.springframework.cloud.task.configuration.DefaultTaskConfigurer; import org.springframework.cloud.task.configuration.TaskConfigurer; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Primary; /** * Establishes the default {@link DataSource} for the Task when creating a * {@link DataSource} for * {@link org.springframework.batch.infrastructure.item.database.JdbcCursorItemReader} or * {@link org.springframework.batch.infrastructure.item.database.JdbcBatchItemWriter}. 
* * @author Glenn Renfro * @since 3.0 */ class JDBCSingleStepDataSourceAutoConfiguration { @ConditionalOnMissingBean @Bean public TaskConfigurer myTaskConfigurer(DataSource dataSource) { return new DefaultTaskConfigurer(dataSource); } @ConditionalOnProperty(prefix = "spring.batch.job.jdbcsinglestep.datasource", name = "enable", havingValue = "true", matchIfMissing = true) @ConditionalOnMissingBean(name = "springDataSourceProperties") @Bean(name = "springDataSourceProperties") @ConfigurationProperties("spring.datasource") @Primary public DataSourceProperties springDataSourceProperties() { return new DataSourceProperties(); } @ConditionalOnProperty(prefix = "spring.batch.job.jdbcsinglestep.datasource", name = "enable", havingValue = "true", matchIfMissing = true) @Bean(name = "springDataSource") @Primary public DataSource dataSource( @Qualifier("springDataSourceProperties") DataSourceProperties springDataSourceProperties) { DataSource dataSource = springDataSourceProperties.initializeDataSourceBuilder().build(); return dataSource; } } ================================================ FILE: spring-cloud-starter-single-step-batch-job/src/main/java/org/springframework/cloud/task/batch/autoconfigure/jdbc/JdbcBatchItemWriterAutoConfiguration.java ================================================ /* * Copyright 2020-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.springframework.cloud.task.batch.autoconfigure.jdbc; import java.util.Map; import javax.sql.DataSource; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.springframework.batch.infrastructure.item.database.ItemPreparedStatementSetter; import org.springframework.batch.infrastructure.item.database.ItemSqlParameterSourceProvider; import org.springframework.batch.infrastructure.item.database.JdbcBatchItemWriter; import org.springframework.batch.infrastructure.item.database.builder.JdbcBatchItemWriterBuilder; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.boot.autoconfigure.AutoConfiguration; import org.springframework.boot.autoconfigure.AutoConfigureAfter; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.boot.batch.autoconfigure.BatchAutoConfiguration; import org.springframework.boot.context.properties.ConfigurationProperties; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.boot.jdbc.autoconfigure.DataSourceProperties; import org.springframework.context.ApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Import; /** * Autconfiguration for a {@code JdbcBatchItemWriter}. 
* * @author Glenn Renfro * @author Michael Minella * @since 2.3 */ @AutoConfiguration @EnableConfigurationProperties(JdbcBatchItemWriterProperties.class) @AutoConfigureAfter(BatchAutoConfiguration.class) @Import(JDBCSingleStepDataSourceAutoConfiguration.class) public class JdbcBatchItemWriterAutoConfiguration { private static final Log logger = LogFactory.getLog(JdbcBatchItemWriterAutoConfiguration.class); @Autowired(required = false) private ItemPreparedStatementSetter itemPreparedStatementSetter; @Autowired(required = false) private ItemSqlParameterSourceProvider itemSqlParameterSourceProvider; @Autowired ApplicationContext applicationContext; private JdbcBatchItemWriterProperties properties; private DataSource dataSource; public JdbcBatchItemWriterAutoConfiguration(DataSource dataSource, JdbcBatchItemWriterProperties properties) { this.dataSource = dataSource; this.properties = properties; } @Bean @ConditionalOnMissingBean @ConditionalOnProperty(prefix = "spring.batch.job.jdbcbatchitemwriter", name = "name") public JdbcBatchItemWriter> itemWriter() { DataSource writerDataSource = this.dataSource; try { writerDataSource = this.applicationContext.getBean("jdbcBatchItemWriterSpringDataSource", DataSource.class); } catch (Exception ex) { logger.info("Using Default Data Source for the JdbcBatchItemWriter"); } JdbcBatchItemWriterBuilder> jdbcBatchItemWriterBuilder = new JdbcBatchItemWriterBuilder>() .dataSource(writerDataSource) .sql(this.properties.getSql()); if (this.itemPreparedStatementSetter != null) { jdbcBatchItemWriterBuilder.itemPreparedStatementSetter(this.itemPreparedStatementSetter); } else if (this.itemSqlParameterSourceProvider != null) { jdbcBatchItemWriterBuilder.itemSqlParameterSourceProvider(this.itemSqlParameterSourceProvider); } else { jdbcBatchItemWriterBuilder.columnMapped(); } jdbcBatchItemWriterBuilder.assertUpdates(this.properties.isAssertUpdates()); return jdbcBatchItemWriterBuilder.build(); } @ConditionalOnProperty(prefix = 
"spring.batch.job.jdbcbatchitemwriter.datasource", name = "enable", havingValue = "true") @Bean(name = "jdbcBatchItemWriterDataSourceProperties") @ConfigurationProperties("jdbcbatchitemwriter.datasource") public DataSourceProperties jdbcBatchItemWriterDataSourceProperties() { return new DataSourceProperties(); } @ConditionalOnProperty(prefix = "spring.batch.job.jdbcbatchitemwriter.datasource", name = "enable", havingValue = "true") @Bean(name = "jdbcBatchItemWriterSpringDataSource") public DataSource writerDataSource( @Qualifier("jdbcBatchItemWriterDataSourceProperties") DataSourceProperties writerDataSourceProperties) { DataSource result = writerDataSourceProperties.initializeDataSourceBuilder().build(); return result; } } ================================================ FILE: spring-cloud-starter-single-step-batch-job/src/main/java/org/springframework/cloud/task/batch/autoconfigure/jdbc/JdbcBatchItemWriterProperties.java ================================================ /* * Copyright 2020-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.batch.autoconfigure.jdbc; import org.springframework.boot.context.properties.ConfigurationProperties; /** * Properties to configure a {@code JdbcBatchItemWriter}. 
* * @author Glenn Renfro * @since 2.3 */ @ConfigurationProperties(prefix = "spring.batch.job.jdbcbatchitemwriter") public class JdbcBatchItemWriterProperties { /** * The name used to calculate the key within the * {@link org.springframework.batch.infrastructure.item.ExecutionContext}. */ private String name; /** * The SQL statement to be used to update the database. */ private String sql; /** * If set to {@code true}, confirms that every insert results in the update of at * least one row in the database. Defaults to {@code true}. */ private boolean assertUpdates = true; /** * @return The current sql statement used to update the database. */ public String getSql() { return sql; } /** * Sets the sql statement to be used to update the database. * @param sql the sql statement to be used. */ public void setSql(String sql) { this.sql = sql; } /** * @return if returns true then each insert will be confirmed to have at least one * insert in the database. */ public boolean isAssertUpdates() { return assertUpdates; } /** * If set to true, confirms that every insert results in the update of at least one * row in the database. Defaults to True */ public void setAssertUpdates(boolean assertUpdates) { this.assertUpdates = assertUpdates; } /** * Returns the configured value of the name used to calculate {@code ExecutionContext} * keys. * @return the name */ public String getName() { return name; } /** * The name used to calculate the key within the * {@link org.springframework.batch.infrastructure.item.ExecutionContext}. 
* @param name name of the writer instance * @see org.springframework.batch.infrastructure.item.ItemStreamSupport#setName(String) */ public void setName(String name) { this.name = name; } } ================================================ FILE: spring-cloud-starter-single-step-batch-job/src/main/java/org/springframework/cloud/task/batch/autoconfigure/jdbc/JdbcCursorItemReaderAutoConfiguration.java ================================================ /* * Copyright 2020-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.springframework.cloud.task.batch.autoconfigure.jdbc; import java.sql.ResultSet; import java.sql.SQLException; import java.util.HashMap; import java.util.Map; import javax.sql.DataSource; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.springframework.batch.infrastructure.item.database.JdbcCursorItemReader; import org.springframework.batch.infrastructure.item.database.builder.JdbcCursorItemReaderBuilder; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.boot.autoconfigure.AutoConfiguration; import org.springframework.boot.autoconfigure.AutoConfigureAfter; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.boot.batch.autoconfigure.BatchAutoConfiguration; import org.springframework.boot.context.properties.ConfigurationProperties; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.boot.jdbc.autoconfigure.DataSourceProperties; import org.springframework.context.ApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Import; import org.springframework.jdbc.core.PreparedStatementSetter; import org.springframework.jdbc.core.RowMapper; /** * @author Michael Minella * @author Glenn Renfro * @since 2.3 */ @AutoConfiguration @EnableConfigurationProperties(JdbcCursorItemReaderProperties.class) @AutoConfigureAfter(BatchAutoConfiguration.class) @ConditionalOnProperty(prefix = "spring.batch.job.jdbccursoritemreader", name = "name") @Import(JDBCSingleStepDataSourceAutoConfiguration.class) public class JdbcCursorItemReaderAutoConfiguration { private static final Log logger = LogFactory.getLog(JdbcCursorItemReaderAutoConfiguration.class); @Autowired ApplicationContext 
applicationContext; private final JdbcCursorItemReaderProperties properties; private final DataSource dataSource; public JdbcCursorItemReaderAutoConfiguration(JdbcCursorItemReaderProperties properties, DataSource dataSource) { this.properties = properties; this.dataSource = dataSource; } @Bean @ConditionalOnMissingBean public JdbcCursorItemReader> itemReader( @Autowired(required = false) RowMapper> rowMapper, @Autowired(required = false) PreparedStatementSetter preparedStatementSetter) { DataSource readerDataSource = this.dataSource; try { readerDataSource = this.applicationContext.getBean("jdbcCursorItemReaderSpringDataSource", DataSource.class); } catch (Exception e) { logger.info("Using Default Data Source for the JdbcCursorItemReader"); } return new JdbcCursorItemReaderBuilder>().name(this.properties.getName()) .currentItemCount(this.properties.getCurrentItemCount()) .dataSource(readerDataSource) .driverSupportsAbsolute(this.properties.isDriverSupportsAbsolute()) .fetchSize(this.properties.getFetchSize()) .ignoreWarnings(this.properties.isIgnoreWarnings()) .maxItemCount(this.properties.getMaxItemCount()) .maxRows(this.properties.getMaxRows()) .queryTimeout(this.properties.getQueryTimeout()) .saveState(this.properties.isSaveState()) .sql(this.properties.getSql()) .rowMapper(rowMapper) .preparedStatementSetter(preparedStatementSetter) .verifyCursorPosition(this.properties.isVerifyCursorPosition()) .useSharedExtendedConnection(this.properties.isUseSharedExtendedConnection()) .build(); } @Bean @ConditionalOnMissingBean public RowMapper> rowMapper() { return new MapRowMapper(); } @ConditionalOnProperty(prefix = "spring.batch.job.jdbccursoritemreader.datasource", name = "enable", havingValue = "true") @Bean(name = "jdbcCursorItemReaderDataSourceProperties") @ConfigurationProperties("jdbccursoritemreader.datasource") public DataSourceProperties jdbcCursorItemReaderDataSourceProperties() { return new DataSourceProperties(); } @ConditionalOnProperty(prefix = 
"spring.batch.job.jdbccursoritemreader.datasource", name = "enable", havingValue = "true") @Bean(name = "jdbcCursorItemReaderSpringDataSource") public DataSource readerDataSource( @Qualifier("jdbcCursorItemReaderDataSourceProperties") DataSourceProperties readerDataSourceProperties) { DataSource result = readerDataSourceProperties.initializeDataSourceBuilder().build(); return result; } public static class MapRowMapper implements RowMapper> { @Override public Map mapRow(ResultSet rs, int rowNum) throws SQLException { Map item = new HashMap<>(rs.getMetaData().getColumnCount()); for (int i = 1; i <= rs.getMetaData().getColumnCount(); i++) { item.put(rs.getMetaData().getColumnName(i), rs.getObject(i)); } return item; } } } ================================================ FILE: spring-cloud-starter-single-step-batch-job/src/main/java/org/springframework/cloud/task/batch/autoconfigure/jdbc/JdbcCursorItemReaderProperties.java ================================================ /* * Copyright 2020-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/
package org.springframework.cloud.task.batch.autoconfigure.jdbc;

import org.springframework.boot.context.properties.ConfigurationProperties;

/**
 * Properties for configuring a {@code JdbcCursorItemReader}.
 *
 * @author Michael Minella
 * @since 2.3
 */
@ConfigurationProperties(prefix = "spring.batch.job.jdbccursoritemreader")
public class JdbcCursorItemReaderProperties {

	/**
	 * Configure whether the state of the
	 * {@link org.springframework.batch.infrastructure.item.ItemStreamSupport} should be
	 * persisted within the
	 * {@link org.springframework.batch.infrastructure.item.ExecutionContext} for restart
	 * purposes. Defaults to {@code true}.
	 */
	private boolean saveState = true;

	/**
	 * The name used to calculate {@code ExecutionContext} keys.
	 */
	private String name;

	/**
	 * Configure the maximum number of items to be read.
	 */
	private int maxItemCount = Integer.MAX_VALUE;

	/**
	 * Index for the current item. Also used on restarts to indicate where to start from.
	 * Defaults to 0.
	 */
	private int currentItemCount = 0;

	/**
	 * The number of items to return each time the cursor fetches from the server.
	 */
	private int fetchSize;

	/**
	 * Sets the maximum number of rows to be read with this reader.
	 */
	private int maxRows;

	/**
	 * The time in milliseconds for the query to timeout.
	 */
	private int queryTimeout;

	/**
	 * Establishes whether SQL warnings should be ignored. Defaults to {@code false}.
	 */
	private boolean ignoreWarnings;

	/**
	 * Sets whether the cursor's position should be validated with each item read.
	 * Defaults to {@code false}.
	 */
	private boolean verifyCursorPosition;

	/**
	 * Establishes whether the driver supports absolute positioning of a cursor. Defaults
	 * to {@code false}.
	 */
	private boolean driverSupportsAbsolute;

	/**
	 * Establishes whether the connection used for the cursor is being used by all other
	 * processing and is, therefore, part of the same transaction. Defaults to
	 * {@code false}.
	 */
	private boolean useSharedExtendedConnection;

	/**
	 * The SQL query to be executed.
	 */
	private String sql;

	/**
	 * Returns the configured value of if the state of the reader will be persisted.
	 * @return true if the state will be persisted
	 */
	public boolean isSaveState() {
		return this.saveState;
	}

	/**
	 * Configure if the state of the
	 * {@link org.springframework.batch.infrastructure.item.ItemStreamSupport} should be
	 * persisted within the
	 * {@link org.springframework.batch.infrastructure.item.ExecutionContext} for restart
	 * purposes.
	 * @param saveState defaults to true
	 */
	public void setSaveState(boolean saveState) {
		this.saveState = saveState;
	}

	/**
	 * Returns the configured value of the name used to calculate {@code ExecutionContext}
	 * keys.
	 * @return the name
	 */
	public String getName() {
		return this.name;
	}

	/**
	 * The name used to calculate the key within the
	 * {@link org.springframework.batch.infrastructure.item.ExecutionContext}. Required if
	 * {@link #setSaveState} is set to true.
	 * @param name name of the reader instance
	 * @see org.springframework.batch.infrastructure.item.ItemStreamSupport#setName(String)
	 */
	public void setName(String name) {
		this.name = name;
	}

	/**
	 * The maximum number of items to be read.
	 * @return the configured number of items, defaults to Integer.MAX_VALUE
	 */
	public int getMaxItemCount() {
		return this.maxItemCount;
	}

	/**
	 * Configure the max number of items to be read.
	 * @param maxItemCount the max items to be read
	 * @see org.springframework.batch.infrastructure.item.support.AbstractItemCountingItemStreamItemReader#setMaxItemCount(int)
	 */
	public void setMaxItemCount(int maxItemCount) {
		this.maxItemCount = maxItemCount;
	}

	/**
	 * Provides the index of the current item.
	 * @return item index
	 */
	public int getCurrentItemCount() {
		return this.currentItemCount;
	}

	/**
	 * Index for the current item. Also used on restarts to indicate where to start from.
	 * @param currentItemCount current index
	 * @see org.springframework.batch.infrastructure.item.support.AbstractItemCountingItemStreamItemReader#setCurrentItemCount(int)
	 */
	public void setCurrentItemCount(int currentItemCount) {
		this.currentItemCount = currentItemCount;
	}

	/**
	 * Provides the number of items to return each time the cursor fetches from the
	 * server.
	 * @return fetch size
	 */
	public int getFetchSize() {
		return fetchSize;
	}

	/**
	 * Sets the number of items to return each time the cursor fetches from the server.
	 * @param fetchSize the number of items
	 * @see org.springframework.batch.infrastructure.item.database.builder.JdbcCursorItemReaderBuilder#fetchSize(int)
	 */
	public void setFetchSize(int fetchSize) {
		this.fetchSize = fetchSize;
	}

	/**
	 * Provides the maximum number of rows to read with this reader.
	 * @return maximum number of items
	 */
	public int getMaxRows() {
		return maxRows;
	}

	/**
	 * Sets the maximum number of rows to be read with this reader.
	 * @param maxRows maximum number of items
	 * @see org.springframework.batch.infrastructure.item.database.builder.JdbcCursorItemReaderBuilder#maxRows(int)
	 */
	public void setMaxRows(int maxRows) {
		this.maxRows = maxRows;
	}

	/**
	 * Provides the time in milliseconds for the query to timeout.
	 * @return milliseconds for the timeout
	 */
	public int getQueryTimeout() {
		return queryTimeout;
	}

	/**
	 * Sets the time in milliseconds for the query to timeout.
	 * @param queryTimeout milliseconds
	 * @see org.springframework.batch.infrastructure.item.database.builder.JdbcCursorItemReaderBuilder#queryTimeout(int)
	 */
	public void setQueryTimeout(int queryTimeout) {
		this.queryTimeout = queryTimeout;
	}

	/**
	 * Provides if SQL warnings should be ignored.
	 * @return true if warnings should be ignored
	 */
	public boolean isIgnoreWarnings() {
		return ignoreWarnings;
	}

	/**
	 * Sets if SQL warnings should be ignored.
	 * @param ignoreWarnings indicator if the warnings should be ignored
	 * @see org.springframework.batch.infrastructure.item.database.builder.JdbcCursorItemReaderBuilder#ignoreWarnings(boolean)
	 */
	public void setIgnoreWarnings(boolean ignoreWarnings) {
		this.ignoreWarnings = ignoreWarnings;
	}

	/**
	 * Indicates if the cursor's position should be validated with each item read (to
	 * confirm that the RowMapper has not moved the cursor's location).
	 * @return true if the position should be validated
	 */
	public boolean isVerifyCursorPosition() {
		return verifyCursorPosition;
	}

	/**
	 * Sets if the cursor's position should be validated with each item read.
	 * @param verifyCursorPosition true if the position should be validated
	 * @see org.springframework.batch.infrastructure.item.database.builder.JdbcCursorItemReaderBuilder#verifyCursorPosition(boolean)
	 */
	public void setVerifyCursorPosition(boolean verifyCursorPosition) {
		this.verifyCursorPosition = verifyCursorPosition;
	}

	/**
	 * Provides if the driver supports absolute positioning of a cursor.
	 * @return true if the driver supports absolute positioning
	 */
	public boolean isDriverSupportsAbsolute() {
		return driverSupportsAbsolute;
	}

	/**
	 * Sets if the driver supports absolute positioning of a cursor.
	 * @param driverSupportsAbsolute true if the driver supports absolute positioning
	 * @see org.springframework.batch.infrastructure.item.database.builder.JdbcCursorItemReaderBuilder#driverSupportsAbsolute(boolean)
	 */
	public void setDriverSupportsAbsolute(boolean driverSupportsAbsolute) {
		this.driverSupportsAbsolute = driverSupportsAbsolute;
	}

	/**
	 * Indicates whether the connection used for the cursor is being used by all other
	 * processing and is, therefore, part of the same transaction.
	 * @return true if the connection is shared beyond this query
	 */
	public boolean isUseSharedExtendedConnection() {
		return useSharedExtendedConnection;
	}

	/**
	 * Sets whether the connection used for the cursor is being used by all other
	 * processing and is, therefore, part of the same transaction.
	 * @param useSharedExtendedConnection true if the connection is shared beyond this
	 * query
	 * @see org.springframework.batch.infrastructure.item.database.builder.JdbcCursorItemReaderBuilder#useSharedExtendedConnection(boolean)
	 */
	public void setUseSharedExtendedConnection(boolean useSharedExtendedConnection) {
		this.useSharedExtendedConnection = useSharedExtendedConnection;
	}

	/**
	 * Returns the SQL query to be executed.
	 * @return the SQL query
	 */
	public String getSql() {
		return sql;
	}

	/**
	 * Sets the SQL query to be executed.
	 * @param sql the query
	 * @see org.springframework.batch.infrastructure.item.database.builder.JdbcCursorItemReaderBuilder#sql(String)
	 */
	public void setSql(String sql) {
		this.sql = sql;
	}

}

================================================
FILE: spring-cloud-starter-single-step-batch-job/src/main/java/org/springframework/cloud/task/batch/autoconfigure/jdbc/package-info.java
================================================
/*
 * Copyright 2015-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * Auto-configuration classes for JDBC item readers and writers in single-step batch jobs.
*/ package org.springframework.cloud.task.batch.autoconfigure.jdbc; ================================================ FILE: spring-cloud-starter-single-step-batch-job/src/main/java/org/springframework/cloud/task/batch/autoconfigure/kafka/KafkaItemReaderAutoConfiguration.java ================================================ /* * Copyright 2020-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.batch.autoconfigure.kafka; import java.time.Duration; import java.util.ArrayList; import java.util.Map; import java.util.Properties; import org.springframework.batch.infrastructure.item.kafka.KafkaItemReader; import org.springframework.batch.infrastructure.item.kafka.builder.KafkaItemReaderBuilder; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.autoconfigure.AutoConfiguration; import org.springframework.boot.autoconfigure.AutoConfigureAfter; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.boot.batch.autoconfigure.BatchAutoConfiguration; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.boot.kafka.autoconfigure.KafkaProperties; import org.springframework.context.annotation.Bean; import org.springframework.util.StringUtils; /** * * AutoConfiguration for a {@code 
KafkaItemReader}. * * @author Glenn Renfro * @author Michael Minella * @since 2.3 */ @AutoConfiguration @EnableConfigurationProperties({ KafkaProperties.class, KafkaItemReaderProperties.class }) @AutoConfigureAfter(BatchAutoConfiguration.class) public class KafkaItemReaderAutoConfiguration { @Autowired private KafkaProperties kafkaProperties; @Bean @ConditionalOnMissingBean @ConditionalOnProperty(prefix = "spring.batch.job.kafkaitemreader", name = "name") public KafkaItemReader> kafkaItemReader( KafkaItemReaderProperties kafkaItemReaderProperties) { Properties consumerProperties = new Properties(); consumerProperties.putAll(this.kafkaProperties.getConsumer().buildProperties()); validateProperties(kafkaItemReaderProperties); if (kafkaItemReaderProperties.getPartitions() == null || kafkaItemReaderProperties.getPartitions().size() == 0) { kafkaItemReaderProperties.setPartitions(new ArrayList<>(1)); kafkaItemReaderProperties.getPartitions().add(0); } return new KafkaItemReaderBuilder>() .partitions(kafkaItemReaderProperties.getPartitions()) .consumerProperties(consumerProperties) .name(kafkaItemReaderProperties.getName()) .pollTimeout(Duration.ofSeconds(kafkaItemReaderProperties.getPollTimeOutInSeconds())) .saveState(kafkaItemReaderProperties.isSaveState()) .topic(kafkaItemReaderProperties.getTopic()) .build(); } private void validateProperties(KafkaItemReaderProperties kafkaItemReaderProperties) { if (!StringUtils.hasText(kafkaItemReaderProperties.getName())) { throw new IllegalArgumentException("Name must not be empty or null"); } if (!StringUtils.hasText(kafkaItemReaderProperties.getTopic())) { throw new IllegalArgumentException("Topic must not be empty or null"); } if (!StringUtils.hasText(this.kafkaProperties.getConsumer().getGroupId())) { throw new IllegalArgumentException("GroupId must not be empty or null"); } if (this.kafkaProperties.getBootstrapServers() == null || this.kafkaProperties.getBootstrapServers().size() == 0) { throw new 
IllegalArgumentException("Bootstrap Servers must be configured"); } } } ================================================ FILE: spring-cloud-starter-single-step-batch-job/src/main/java/org/springframework/cloud/task/batch/autoconfigure/kafka/KafkaItemReaderProperties.java ================================================ /* * Copyright 2020-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.batch.autoconfigure.kafka; import java.util.ArrayList; import java.util.List; import org.springframework.boot.context.properties.ConfigurationProperties; /** * Properties to configure a {@code KafkaItemReader}. * * @author Glenn Renfro * @since 2.3 */ @ConfigurationProperties(prefix = "spring.batch.job.kafkaitemreader") public class KafkaItemReaderProperties { /** * The name used to calculate the key within the * {@link org.springframework.batch.infrastructure.item.ExecutionContext}. */ private String name; /** * The topic name from which the messages is read. */ private String topic; /** * A list of partitions to manually assign to the consumer. Defaults to a single entry * value of 1. */ private List partitions = new ArrayList<>(); /** * Establish the {@code pollTimeout} for the {@code poll()} operations. Defaults to 30 * seconds. 
*/
	private long pollTimeOutInSeconds = 30L;

	/**
	 * Configure whether the state of the
	 * {@link org.springframework.batch.infrastructure.item.ItemStreamSupport} should be
	 * persisted within the
	 * {@link org.springframework.batch.infrastructure.item.ExecutionContext} for restart
	 * purposes. Defaults to {@code true}.
	 */
	private boolean saveState = true;

	/**
	 * Returns the configured value of the name used to calculate {@code ExecutionContext}
	 * keys.
	 * @return the name
	 */
	public String getName() {
		return name;
	}

	/**
	 * The name used to calculate the key within the
	 * {@link org.springframework.batch.infrastructure.item.ExecutionContext}.
	 * @param name name of the reader instance
	 * @see org.springframework.batch.infrastructure.item.ItemStreamSupport#setName(String)
	 */
	public void setName(String name) {
		this.name = name;
	}

	/**
	 * Returns the name of the topic from which messages will be read.
	 * @return the name of the topic.
	 */
	public String getTopic() {
		return topic;
	}

	/**
	 * The topic name from which the messages will be read.
	 * @param topic name of the topic
	 */
	public void setTopic(String topic) {
		this.topic = topic;
	}

	/**
	 * A list of partitions to manually assign to the consumer. If none are configured,
	 * the auto-configuration defaults to partition 0.
	 * @return the list of partitions.
	 */
	public List<Integer> getPartitions() {
		return partitions;
	}

	/**
	 * A list of partitions to manually assign to the consumer. If none are configured,
	 * the auto-configuration defaults to partition 0.
	 * @param partitions list of partitions
	 */
	public void setPartitions(List<Integer> partitions) {
		this.partitions = partitions;
	}

	/**
	 * Get the pollTimeout for the poll() operations. Defaults to 30 seconds.
	 * @return long containing the poll timeout.
	 */
	public long getPollTimeOutInSeconds() {
		return pollTimeOutInSeconds;
	}

	/**
	 * Set the pollTimeout for the poll() operations. Defaults to 30 seconds.
	 * @param pollTimeOutInSeconds the number of seconds to wait before timing out.
	 */
	public void setPollTimeOutInSeconds(long pollTimeOutInSeconds) {
		this.pollTimeOutInSeconds = pollTimeOutInSeconds;
	}

	/**
	 * Configure if the state of the
	 * {@link org.springframework.batch.infrastructure.item.ItemStreamSupport} should be
	 * persisted within the
	 * {@link org.springframework.batch.infrastructure.item.ExecutionContext} for restart
	 * purposes. Defaults to true.
	 * @return current status of the saveState flag.
	 */
	public boolean isSaveState() {
		return saveState;
	}

	/**
	 * Configure if the state of the
	 * {@link org.springframework.batch.infrastructure.item.ItemStreamSupport} should be
	 * persisted within the
	 * {@link org.springframework.batch.infrastructure.item.ExecutionContext} for restart
	 * purposes.
	 * @param saveState true if state should be persisted. Defaults to true.
	 */
	public void setSaveState(boolean saveState) {
		this.saveState = saveState;
	}

}

================================================
FILE: spring-cloud-starter-single-step-batch-job/src/main/java/org/springframework/cloud/task/batch/autoconfigure/kafka/KafkaItemWriterAutoConfiguration.java
================================================
/*
 * Copyright 2020-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*/ package org.springframework.cloud.task.batch.autoconfigure.kafka; import java.util.HashMap; import java.util.Map; import org.springframework.batch.infrastructure.item.kafka.KafkaItemWriter; import org.springframework.batch.infrastructure.item.kafka.builder.KafkaItemWriterBuilder; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.boot.autoconfigure.AutoConfiguration; import org.springframework.boot.autoconfigure.AutoConfigureAfter; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.boot.batch.autoconfigure.BatchAutoConfiguration; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.boot.kafka.autoconfigure.KafkaProperties; import org.springframework.context.annotation.Bean; import org.springframework.core.convert.converter.Converter; import org.springframework.kafka.core.DefaultKafkaProducerFactory; import org.springframework.kafka.core.KafkaTemplate; import org.springframework.kafka.core.ProducerFactory; import org.springframework.kafka.support.serializer.JacksonJsonSerializer; import org.springframework.util.Assert; /** * * Autconfiguration for a {@code KafkaItemReader}. 
* * @author Glenn Renfro * @author Michael Minella * @since 2.3 */ @AutoConfiguration @EnableConfigurationProperties({ KafkaProperties.class, KafkaItemWriterProperties.class }) @AutoConfigureAfter(BatchAutoConfiguration.class) public class KafkaItemWriterAutoConfiguration { @Autowired private KafkaProperties kafkaProperties; @Bean @ConditionalOnMissingBean @ConditionalOnProperty(prefix = "spring.batch.job.kafkaitemwriter", name = "topic") public KafkaItemWriter> kafkaItemWriter( KafkaItemWriterProperties kafkaItemWriterProperties, ProducerFactory> producerFactory, @Qualifier("batchItemKeyMapper") Converter, Object> itemKeyMapper) { validateProperties(kafkaItemWriterProperties); KafkaTemplate template = new KafkaTemplate(producerFactory); template.setDefaultTopic(kafkaItemWriterProperties.getTopic()); return new KafkaItemWriterBuilder>().delete(kafkaItemWriterProperties.isDelete()) .kafkaTemplate(template) .itemKeyMapper(itemKeyMapper) .build(); } @Bean @ConditionalOnMissingBean(name = "batchItemKeyMapper") public Converter, Object> batchItemKeyMapper() { return new Converter, Object>() { @Override public Object convert(Map source) { return source; } }; } @Bean @ConditionalOnMissingBean ProducerFactory> producerFactory() { Map configs = new HashMap<>(); configs.putAll(this.kafkaProperties.getProducer().buildProperties()); return new DefaultKafkaProducerFactory<>(configs, null, new JacksonJsonSerializer()); } private void validateProperties(KafkaItemWriterProperties kafkaItemWriterProperties) { Assert.hasText(kafkaItemWriterProperties.getTopic(), "topic must not be empty or null"); } } ================================================ FILE: spring-cloud-starter-single-step-batch-job/src/main/java/org/springframework/cloud/task/batch/autoconfigure/kafka/KafkaItemWriterProperties.java ================================================ /* * Copyright 2020-present the original author or authors. 
*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.cloud.task.batch.autoconfigure.kafka;

import org.springframework.boot.context.properties.ConfigurationProperties;

/**
 * Properties to configure a {@code KafkaItemWriter}.
 *
 * @author Glenn Renfro
 * @since 2.3
 */
@ConfigurationProperties(prefix = "spring.batch.job.kafkaitemwriter")
public class KafkaItemWriterProperties {

	/**
	 * The topic name to which the messages are written.
	 */
	private String topic;

	/**
	 * Indicate whether the items being passed to the writer are all to be sent as delete
	 * events to the topic. Defaults to {@code false}.
	 */
	private boolean delete;

	/**
	 * Returns the name of the topic to which messages will be written.
	 * @return the name of the topic.
	 */
	public String getTopic() {
		return topic;
	}

	/**
	 * The topic name to which the messages are written.
	 * @param topic name of the topic
	 */
	public void setTopic(String topic) {
		this.topic = topic;
	}

	/**
	 * Indicate if the items being passed to the writer are all to be sent as delete
	 * events to the topic. A delete event is made of a key with a null value. If set to
	 * false (default), the items will be sent with provided value and key converter by
	 * the itemKeyMapper. If set to true, the items will be sent with the key converter
	 * from the value by the itemKeyMapper and a null value.
	 * @return removal indicator.
*/
	public boolean isDelete() {
		return delete;
	}

	/**
	 * Indicate if the items being passed to the writer are all to be sent as delete
	 * events to the topic. A delete event is made of a key with a null value. If set to
	 * false (default), the items will be sent with provided value and key converter by
	 * the itemKeyMapper. If set to true, the items will be sent with the key converter
	 * from the value by the itemKeyMapper and a null value.
	 * @param delete removal indicator.
	 */
	public void setDelete(boolean delete) {
		this.delete = delete;
	}

}

================================================
FILE: spring-cloud-starter-single-step-batch-job/src/main/java/org/springframework/cloud/task/batch/autoconfigure/kafka/package-info.java
================================================
/*
 * Copyright 2015-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * Auto-configuration classes for Kafka item readers and writers in single-step batch
 * jobs.
 */
package org.springframework.cloud.task.batch.autoconfigure.kafka;

================================================
FILE: spring-cloud-starter-single-step-batch-job/src/main/java/org/springframework/cloud/task/batch/autoconfigure/package-info.java
================================================
/*
 * Copyright 2015-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Auto-configuration classes for Spring Cloud Task single-step batch jobs. */ package org.springframework.cloud.task.batch.autoconfigure; ================================================ FILE: spring-cloud-starter-single-step-batch-job/src/main/java/org/springframework/cloud/task/batch/autoconfigure/rabbit/AmqpItemReaderAutoConfiguration.java ================================================ /* * Copyright 2020-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.springframework.cloud.task.batch.autoconfigure.rabbit; import java.util.Map; import org.springframework.amqp.core.AmqpTemplate; import org.springframework.amqp.support.converter.JacksonJsonMessageConverter; import org.springframework.amqp.support.converter.MessageConverter; import org.springframework.batch.infrastructure.item.amqp.AmqpItemReader; import org.springframework.batch.infrastructure.item.amqp.builder.AmqpItemReaderBuilder; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.amqp.autoconfigure.RabbitProperties; import org.springframework.boot.autoconfigure.AutoConfiguration; import org.springframework.boot.autoconfigure.AutoConfigureAfter; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.boot.batch.autoconfigure.BatchAutoConfiguration; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.context.annotation.Bean; /** * Autconfiguration for a {@code AmqpItemReader}. 
* * @author Glenn Renfro * @author Michael Minella * @since 2.3 */ @AutoConfiguration @EnableConfigurationProperties(AmqpItemReaderProperties.class) @AutoConfigureAfter(BatchAutoConfiguration.class) @ConditionalOnProperty(name = "spring.batch.job.amqpitemreader.enabled", havingValue = "true", matchIfMissing = false) public class AmqpItemReaderAutoConfiguration { @Autowired(required = false) private RabbitProperties rabbitProperties; @Bean public AmqpItemReaderProperties amqpItemReaderProperties() { return new AmqpItemReaderProperties(); } @Bean public AmqpItemReader> amqpItemReader(AmqpTemplate amqpTemplate, @Autowired(required = false) Class itemType) { AmqpItemReaderBuilder> builder = new AmqpItemReaderBuilder>() .amqpTemplate(amqpTemplate); if (itemType != null) { builder.itemType(itemType); } return builder.build(); } @ConditionalOnProperty(name = "spring.batch.job.amqpitemreader.jsonConverterEnabled", havingValue = "true", matchIfMissing = true) @Bean public MessageConverter messageConverter() { return new JacksonJsonMessageConverter(); } } ================================================ FILE: spring-cloud-starter-single-step-batch-job/src/main/java/org/springframework/cloud/task/batch/autoconfigure/rabbit/AmqpItemReaderProperties.java ================================================ /* * Copyright 2020-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.springframework.cloud.task.batch.autoconfigure.rabbit; import org.springframework.amqp.support.converter.JacksonJsonMessageConverter; import org.springframework.boot.context.properties.ConfigurationProperties; /** * Properties to configure a {@code AmqpItemReader}. * * @author Glenn Renfro * @since 2.3 */ @ConfigurationProperties(prefix = "spring.batch.job.amqpitemreader") public class AmqpItemReaderProperties { /** * Enables or disables the {@code AmqpItemReader}. Defaults to {@code false}. */ private boolean enabled; /** * Establishes whether the {@link JacksonJsonMessageConverter} is to be used as a * message converter. Defaults to {@code true}. */ private boolean jsonConverterEnabled = true; /** * The state of the enabled flag. * @return true if AmqpItemReader is enabled. Otherwise false. */ public boolean isEnabled() { return enabled; } /** * Enables or disables the AmqpItemReader. * @param enabled if true then AmqpItemReader will be enabled. Defaults to false. */ public void setEnabled(boolean enabled) { this.enabled = enabled; } /** * States whether the {@link JacksonJsonMessageConverter} is used as a message * converter. * @return true if enabled else false. */ public boolean isJsonConverterEnabled() { return jsonConverterEnabled; } /** * Establishes whether the {@link JacksonJsonMessageConverter} is to be used as a * message converter. * @param jsonConverterEnabled true if it is to be enabled else false. Defaults to * true. */ public void setJsonConverterEnabled(boolean jsonConverterEnabled) { this.jsonConverterEnabled = jsonConverterEnabled; } } ================================================ FILE: spring-cloud-starter-single-step-batch-job/src/main/java/org/springframework/cloud/task/batch/autoconfigure/rabbit/AmqpItemWriterAutoConfiguration.java ================================================ /* * Copyright 2020-present the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.batch.autoconfigure.rabbit; import java.util.Map; import org.springframework.amqp.core.AmqpTemplate; import org.springframework.amqp.support.converter.JacksonJsonMessageConverter; import org.springframework.amqp.support.converter.MessageConverter; import org.springframework.batch.infrastructure.item.amqp.AmqpItemWriter; import org.springframework.batch.infrastructure.item.amqp.builder.AmqpItemWriterBuilder; import org.springframework.boot.autoconfigure.AutoConfiguration; import org.springframework.boot.autoconfigure.AutoConfigureAfter; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.boot.batch.autoconfigure.BatchAutoConfiguration; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.context.annotation.Bean; /** * Autconfiguration for a {@code AmqpItemWriter}. 
* * @author Glenn Renfro * @author Michael Minella * @since 2.3 */ @AutoConfiguration @EnableConfigurationProperties(AmqpItemWriterProperties.class) @AutoConfigureAfter(BatchAutoConfiguration.class) @ConditionalOnProperty(name = "spring.batch.job.amqpitemwriter.enabled", havingValue = "true", matchIfMissing = false) public class AmqpItemWriterAutoConfiguration { @Bean public AmqpItemWriter> amqpItemWriter(AmqpTemplate amqpTemplate) { return new AmqpItemWriterBuilder>().amqpTemplate(amqpTemplate).build(); } @Bean public AmqpItemWriterProperties amqpItemWriterProperties() { return new AmqpItemWriterProperties(); } @ConditionalOnProperty(name = "spring.batch.job.amqpitemwriter.jsonConverterEnabled", havingValue = "true", matchIfMissing = true) @Bean public MessageConverter messageConverter() { return new JacksonJsonMessageConverter(); } } ================================================ FILE: spring-cloud-starter-single-step-batch-job/src/main/java/org/springframework/cloud/task/batch/autoconfigure/rabbit/AmqpItemWriterProperties.java ================================================ /* * Copyright 2020-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.springframework.cloud.task.batch.autoconfigure.rabbit; import org.springframework.amqp.support.converter.JacksonJsonMessageConverter; import org.springframework.boot.context.properties.ConfigurationProperties; /** * @author Glenn Renfro * @since 2.3 */ @ConfigurationProperties(prefix = "spring.batch.job.amqpitemwriter") public class AmqpItemWriterProperties { /** * Enables or disables the AmqpItemWriter. Defaults to {@code false}. */ private boolean enabled; /** * Establishes whether the {@link JacksonJsonMessageConverter} is to be used as a * message converter. Defaults to {@code true}. */ private boolean jsonConverterEnabled = true; /** * The state of the enabled flag. * @return {@code true} if {@code AmqpItemWriter} is enabled. Otherwise {@code false}. */ public boolean isEnabled() { return enabled; } /** * Enables or disables the {@code AmqpItemWriter}. * @param enabled if {@code true} then {@code AmqpItemWriter} is enabled. Defaults to * {@code false}. */ public void setEnabled(boolean enabled) { this.enabled = enabled; } /** * States whether the {@link JacksonJsonMessageConverter} is used as a message * converter. * @return true if enabled else false. */ public boolean isJsonConverterEnabled() { return jsonConverterEnabled; } /** * Establishes whether the {@link JacksonJsonMessageConverter} is to be used as a * message converter. * @param jsonConverterEnabled true if it is to be enabled else false. Defaults to * true. */ public void setJsonConverterEnabled(boolean jsonConverterEnabled) { this.jsonConverterEnabled = jsonConverterEnabled; } } ================================================ FILE: spring-cloud-starter-single-step-batch-job/src/main/java/org/springframework/cloud/task/batch/autoconfigure/rabbit/package-info.java ================================================ /* * Copyright 2015-present the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Auto-configuration classes for RabbitMQ item readers and writers in single-step batch * jobs. */ package org.springframework.cloud.task.batch.autoconfigure.rabbit; ================================================ FILE: spring-cloud-starter-single-step-batch-job/src/main/resources/META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports ================================================ org.springframework.cloud.task.batch.autoconfigure.flatfile.FlatFileItemReaderAutoConfiguration org.springframework.cloud.task.batch.autoconfigure.RangeConverter org.springframework.cloud.task.batch.autoconfigure.SingleStepJobAutoConfiguration org.springframework.cloud.task.batch.autoconfigure.flatfile.FlatFileItemWriterAutoConfiguration org.springframework.cloud.task.batch.autoconfigure.jdbc.JdbcBatchItemWriterAutoConfiguration org.springframework.cloud.task.batch.autoconfigure.jdbc.JdbcCursorItemReaderAutoConfiguration org.springframework.cloud.task.batch.autoconfigure.rabbit.AmqpItemReaderAutoConfiguration org.springframework.cloud.task.batch.autoconfigure.rabbit.AmqpItemWriterAutoConfiguration org.springframework.cloud.task.batch.autoconfigure.kafka.KafkaItemReaderAutoConfiguration org.springframework.cloud.task.batch.autoconfigure.kafka.KafkaItemWriterAutoConfiguration ================================================ FILE: 
spring-cloud-starter-single-step-batch-job/src/main/resources/META-INF/spring-configuration-metadata.json ================================================ { "groups": [ { "name": "spring.batch.job", "type": "org.springframework.cloud.task.batch.autoconfigure.SingleStepJobProperties", "sourceType": "org.springframework.cloud.task.batch.autoconfigure.SingleStepJobProperties" }, { "name": "spring.batch.job.flatfilereader", "type": "org.springframework.cloud.task.batch.autoconfigure.flatfile.FlatFileItemReaderProperties", "sourceType": "org.springframework.cloud.task.batch.autoconfigure.flatfile.FlatFileItemReaderProperties" }, { "name": "spring.batch.job.flatfilewriter", "type": "org.springframework.cloud.task.batch.autoconfigure.flatfile.FlatFileItemWriterProperties", "sourceType": "org.springframework.cloud.task.batch.autoconfigure.flatfile.FlatFileItemWriterProperties" } ], "properties": [ { "name": "spring.batch.job.chunk-size", "type": "java.lang.Integer", "sourceType": "org.springframework.cloud.task.batch.autoconfigure.SingleStepJobProperties" }, { "name": "spring.batch.job.flatfilereader.comments", "type": "java.util.List", "sourceType": "org.springframework.cloud.task.batch.autoconfigure.flatfile.FlatFileItemReaderProperties" }, { "name": "spring.batch.job.flatfilereader.current-item-count", "type": "java.lang.Integer", "sourceType": "org.springframework.cloud.task.batch.autoconfigure.flatfile.FlatFileItemReaderProperties", "defaultValue": 0 }, { "name": "spring.batch.job.flatfilereader.delimited", "type": "java.lang.Boolean", "sourceType": "org.springframework.cloud.task.batch.autoconfigure.flatfile.FlatFileItemReaderProperties", "defaultValue": false }, { "name": "spring.batch.job.flatfilereader.delimiter", "type": "java.lang.String", "sourceType": "org.springframework.cloud.task.batch.autoconfigure.flatfile.FlatFileItemReaderProperties" }, { "name": "spring.batch.job.flatfilereader.encoding", "type": "java.lang.String", "sourceType": 
"org.springframework.cloud.task.batch.autoconfigure.flatfile.FlatFileItemReaderProperties" }, { "name": "spring.batch.job.flatfilereader.fixed-length", "type": "java.lang.Boolean", "sourceType": "org.springframework.cloud.task.batch.autoconfigure.flatfile.FlatFileItemReaderProperties", "defaultValue": false }, { "name": "spring.batch.job.flatfilereader.included-fields", "type": "java.util.List", "sourceType": "org.springframework.cloud.task.batch.autoconfigure.flatfile.FlatFileItemReaderProperties" }, { "name": "spring.batch.job.flatfilereader.lines-to-skip", "type": "java.lang.Integer", "sourceType": "org.springframework.cloud.task.batch.autoconfigure.flatfile.FlatFileItemReaderProperties", "defaultValue": 0 }, { "name": "spring.batch.job.flatfilereader.max-item-count", "type": "java.lang.Integer", "sourceType": "org.springframework.cloud.task.batch.autoconfigure.flatfile.FlatFileItemReaderProperties" }, { "name": "spring.batch.job.flatfilereader.name", "type": "java.lang.String", "sourceType": "org.springframework.cloud.task.batch.autoconfigure.flatfile.FlatFileItemReaderProperties" }, { "name": "spring.batch.job.flatfilereader.names", "type": "java.lang.String[]", "sourceType": "org.springframework.cloud.task.batch.autoconfigure.flatfile.FlatFileItemReaderProperties" }, { "name": "spring.batch.job.flatfilereader.parsing-strict", "type": "java.lang.Boolean", "sourceType": "org.springframework.cloud.task.batch.autoconfigure.flatfile.FlatFileItemReaderProperties", "defaultValue": true }, { "name": "spring.batch.job.flatfilereader.quote-character", "type": "java.lang.Character", "sourceType": "org.springframework.cloud.task.batch.autoconfigure.flatfile.FlatFileItemReaderProperties" }, { "name": "spring.batch.job.flatfilereader.ranges", "type": "java.util.List", "sourceType": "org.springframework.cloud.task.batch.autoconfigure.flatfile.FlatFileItemReaderProperties" }, { "name": "spring.batch.job.flatfilereader.resource", "type": 
"org.springframework.core.io.Resource", "sourceType": "org.springframework.cloud.task.batch.autoconfigure.flatfile.FlatFileItemReaderProperties" }, { "name": "spring.batch.job.flatfilereader.save-state", "type": "java.lang.Boolean", "sourceType": "org.springframework.cloud.task.batch.autoconfigure.flatfile.FlatFileItemReaderProperties", "defaultValue": true }, { "name": "spring.batch.job.flatfilereader.strict", "type": "java.lang.Boolean", "sourceType": "org.springframework.cloud.task.batch.autoconfigure.flatfile.FlatFileItemReaderProperties", "defaultValue": true }, { "name": "spring.batch.job.flatfilewriter.append", "type": "java.lang.Boolean", "sourceType": "org.springframework.cloud.task.batch.autoconfigure.flatfile.FlatFileItemWriterProperties", "defaultValue": false }, { "name": "spring.batch.job.flatfilewriter.delimited", "type": "java.lang.Boolean", "sourceType": "org.springframework.cloud.task.batch.autoconfigure.flatfile.FlatFileItemWriterProperties", "defaultValue": false }, { "name": "spring.batch.job.flatfilewriter.delimiter", "type": "java.lang.String", "sourceType": "org.springframework.cloud.task.batch.autoconfigure.flatfile.FlatFileItemWriterProperties", "defaultValue": "," }, { "name": "spring.batch.job.flatfilewriter.encoding", "type": "java.lang.String", "sourceType": "org.springframework.cloud.task.batch.autoconfigure.flatfile.FlatFileItemWriterProperties" }, { "name": "spring.batch.job.flatfilewriter.force-sync", "type": "java.lang.Boolean", "sourceType": "org.springframework.cloud.task.batch.autoconfigure.flatfile.FlatFileItemWriterProperties", "defaultValue": false }, { "name": "spring.batch.job.flatfilewriter.format", "type": "java.lang.String", "sourceType": "org.springframework.cloud.task.batch.autoconfigure.flatfile.FlatFileItemWriterProperties" }, { "name": "spring.batch.job.flatfilewriter.formatted", "type": "java.lang.Boolean", "sourceType": "org.springframework.cloud.task.batch.autoconfigure.flatfile.FlatFileItemWriterProperties", 
"defaultValue": false }, { "name": "spring.batch.job.flatfilewriter.line-separator", "type": "java.lang.String", "sourceType": "org.springframework.cloud.task.batch.autoconfigure.flatfile.FlatFileItemWriterProperties" }, { "name": "spring.batch.job.flatfilewriter.locale", "type": "java.util.Locale", "sourceType": "org.springframework.cloud.task.batch.autoconfigure.flatfile.FlatFileItemWriterProperties" }, { "name": "spring.batch.job.flatfilewriter.maximum-length", "type": "java.lang.Integer", "sourceType": "org.springframework.cloud.task.batch.autoconfigure.flatfile.FlatFileItemWriterProperties", "defaultValue": 0 }, { "name": "spring.batch.job.flatfilewriter.minimum-length", "type": "java.lang.Integer", "sourceType": "org.springframework.cloud.task.batch.autoconfigure.flatfile.FlatFileItemWriterProperties", "defaultValue": 0 }, { "name": "spring.batch.job.flatfilewriter.name", "type": "java.lang.String", "sourceType": "org.springframework.cloud.task.batch.autoconfigure.flatfile.FlatFileItemWriterProperties" }, { "name": "spring.batch.job.flatfilewriter.names", "type": "java.lang.String[]", "sourceType": "org.springframework.cloud.task.batch.autoconfigure.flatfile.FlatFileItemWriterProperties" }, { "name": "spring.batch.job.flatfilewriter.resource", "type": "org.springframework.core.io.Resource", "sourceType": "org.springframework.cloud.task.batch.autoconfigure.flatfile.FlatFileItemWriterProperties" }, { "name": "spring.batch.job.flatfilewriter.save-state", "type": "java.lang.Boolean", "sourceType": "org.springframework.cloud.task.batch.autoconfigure.flatfile.FlatFileItemWriterProperties", "defaultValue": true }, { "name": "spring.batch.job.flatfilewriter.should-delete-if-empty", "type": "java.lang.Boolean", "sourceType": "org.springframework.cloud.task.batch.autoconfigure.flatfile.FlatFileItemWriterProperties", "defaultValue": false }, { "name": "spring.batch.job.flatfilewriter.should-delete-if-exists", "type": "java.lang.Boolean", "sourceType": 
"org.springframework.cloud.task.batch.autoconfigure.flatfile.FlatFileItemWriterProperties", "defaultValue": true }, { "name": "spring.batch.job.flatfilewriter.transactional", "type": "java.lang.Boolean", "sourceType": "org.springframework.cloud.task.batch.autoconfigure.flatfile.FlatFileItemWriterProperties" }, { "name": "spring.batch.job.job-name", "type": "java.lang.String", "sourceType": "org.springframework.cloud.task.batch.autoconfigure.SingleStepJobProperties" }, { "name": "spring.batch.job.step-name", "type": "java.lang.String", "sourceType": "org.springframework.cloud.task.batch.autoconfigure.SingleStepJobProperties" } ], "hints": [] } ================================================ FILE: spring-cloud-starter-single-step-batch-job/src/test/java/org/springframework/cloud/task/batch/autoconfigure/RangeConverterTests.java ================================================ /* * Copyright 2020-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.springframework.cloud.task.batch.autoconfigure; import org.junit.jupiter.api.Test; import org.springframework.batch.infrastructure.item.file.transform.Range; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatExceptionOfType; /** * @author Michael Minella */ public class RangeConverterTests { @Test public void testNullInput() { RangeConverter converter = new RangeConverter(); assertThat(converter.convert(null)).isNull(); } @Test public void testStartValueOnly() { RangeConverter converter = new RangeConverter(); Range range = converter.convert("5"); assertThat(range.getMin()).isEqualTo(5); assertThat(range.getMax()).isEqualTo(Integer.MAX_VALUE); } @Test public void testStartAndEndValue() { RangeConverter converter = new RangeConverter(); Range range = converter.convert("5-25"); assertThat(range.getMin()).isEqualTo(5); assertThat(range.getMax()).isEqualTo(25); } @Test public void testIllegalValue() { RangeConverter converter = new RangeConverter(); assertThatExceptionOfType(NumberFormatException.class).isThrownBy(() -> { converter.convert("invalid"); }); } @Test public void testTooManyValues() { RangeConverter converter = new RangeConverter(); assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> { converter.convert("1-2-3-4"); }); } } ================================================ FILE: spring-cloud-starter-single-step-batch-job/src/test/java/org/springframework/cloud/task/batch/autoconfigure/SingleStepJobAutoConfigurationTests.java ================================================ /* * Copyright 2020-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.batch.autoconfigure; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; import org.junit.jupiter.api.Test; import org.springframework.batch.core.job.Job; import org.springframework.batch.core.job.JobExecution; import org.springframework.batch.core.job.parameters.JobParameters; import org.springframework.batch.core.launch.JobOperator; import org.springframework.batch.core.repository.JobRepository; import org.springframework.batch.infrastructure.item.support.ListItemReader; import org.springframework.batch.infrastructure.item.support.ListItemWriter; import org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager; import org.springframework.boot.autoconfigure.AutoConfigurations; import org.springframework.boot.autoconfigure.context.PropertyPlaceholderAutoConfiguration; import org.springframework.boot.batch.autoconfigure.BatchAutoConfiguration; import org.springframework.boot.jdbc.autoconfigure.DataSourceAutoConfiguration; import org.springframework.boot.test.context.runner.ApplicationContextRunner; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.transaction.PlatformTransactionManager; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.fail; /** * @author Michael Minella */ public class SingleStepJobAutoConfigurationTests { @Test public void testInvalidProperties() { SingleStepJobProperties 
properties = new SingleStepJobProperties(); try { new SingleStepJobAutoConfiguration(properties, null); } catch (IllegalArgumentException iae) { assertThat(iae.getMessage()).isEqualTo("A job name is required"); } catch (Throwable t) { fail("wrong exception was thrown", t); } properties.setJobName("job"); try { new SingleStepJobAutoConfiguration(properties, null); } catch (IllegalArgumentException iae) { assertThat(iae.getMessage()).isEqualTo("A step name is required"); } catch (Throwable t) { fail("wrong exception was thrown", t); } properties.setStepName("step"); try { new SingleStepJobAutoConfiguration(properties, null); } catch (IllegalArgumentException iae) { assertThat(iae.getMessage()).isEqualTo("A chunk size is required"); } catch (Throwable t) { fail("wrong exception was thrown", t); } properties.setChunkSize(-5); try { new SingleStepJobAutoConfiguration(properties, null); } catch (IllegalArgumentException iae) { assertThat(iae.getMessage()).isEqualTo("A chunk size greater than zero is required"); } catch (Throwable t) { fail("wrong exception was thrown", t); } properties.setChunkSize(5); } @Test public void testSimpleConfiguration() { ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() .withUserConfiguration(SimpleConfiguration.class) .withConfiguration( AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class, BatchAutoConfiguration.class, SingleStepJobAutoConfiguration.class, DataSourceAutoConfiguration.class)) .withPropertyValues("spring.batch.job.jobName=job", "spring.batch.job.stepName=step1", "spring.batch.job.chunkSize=5"); validateConfiguration(applicationContextRunner); } @Test public void testSimpleConfigurationKabobStyle() { ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() .withUserConfiguration(SimpleConfiguration.class) .withConfiguration( AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class, BatchAutoConfiguration.class, SingleStepJobAutoConfiguration.class, 
DataSourceAutoConfiguration.class)) .withPropertyValues("spring.batch.job.job-name=job", "spring.batch.job.step-name=step1", "spring.batch.job.chunk-size=5"); validateConfiguration(applicationContextRunner); } private void validateConfiguration(ApplicationContextRunner applicationContextRunner) { applicationContextRunner.run((context) -> { JobOperator jobOperator = context.getBean(JobOperator.class); Job job = context.getBean(Job.class); ListItemWriter itemWriter = context.getBean(ListItemWriter.class); JobExecution jobExecution = jobOperator.start(job, new JobParameters()); JobRepository jobRepository = context.getBean(JobRepository.class); while (jobRepository.getJobExecution(jobExecution.getId()).isRunning()) { Thread.sleep(1000); } List> writtenItems = itemWriter.getWrittenItems(); assertThat(writtenItems.size()).isEqualTo(3); assertThat(writtenItems.get(0).get("item")).isEqualTo("foo"); assertThat(writtenItems.get(1).get("item")).isEqualTo("bar"); assertThat(writtenItems.get(2).get("item")).isEqualTo("baz"); }); } @Configuration public static class SimpleConfiguration { @Bean public PlatformTransactionManager platformTransactionManager() { return new ResourcelessTransactionManager(); } @Bean public ListItemReader> itemReader() { List> items = new ArrayList<>(3); items.add(Collections.singletonMap("item", "foo")); items.add(Collections.singletonMap("item", "bar")); items.add(Collections.singletonMap("item", "baz")); return new ListItemReader<>(items); } @Bean public ListItemWriter> itemWriter() { return new ListItemWriter<>(); } } } ================================================ FILE: spring-cloud-starter-single-step-batch-job/src/test/java/org/springframework/cloud/task/batch/autoconfigure/flatfile/FlatFileItemReaderAutoConfigurationTests.java ================================================ /* * Copyright 2020-present the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.batch.autoconfigure.flatfile; import java.math.BigInteger; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.junit.jupiter.api.Test; import org.springframework.batch.core.job.Job; import org.springframework.batch.core.job.JobExecution; import org.springframework.batch.core.job.parameters.JobParameters; import org.springframework.batch.core.launch.JobOperator; import org.springframework.batch.core.repository.JobRepository; import org.springframework.batch.infrastructure.item.file.LineCallbackHandler; import org.springframework.batch.infrastructure.item.file.LineMapper; import org.springframework.batch.infrastructure.item.file.mapping.FieldSetMapper; import org.springframework.batch.infrastructure.item.file.separator.RecordSeparatorPolicy; import org.springframework.batch.infrastructure.item.file.transform.DefaultFieldSet; import org.springframework.batch.infrastructure.item.file.transform.LineTokenizer; import org.springframework.batch.infrastructure.item.support.ListItemWriter; import org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager; import org.springframework.boot.autoconfigure.AutoConfigurations; import org.springframework.boot.autoconfigure.context.PropertyPlaceholderAutoConfiguration; import org.springframework.boot.batch.autoconfigure.BatchAutoConfiguration; import 
org.springframework.boot.jdbc.autoconfigure.DataSourceAutoConfiguration; import org.springframework.boot.test.context.runner.ApplicationContextRunner; import org.springframework.cloud.task.batch.autoconfigure.RangeConverter; import org.springframework.cloud.task.batch.autoconfigure.SingleStepJobAutoConfiguration; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.transaction.PlatformTransactionManager; import static org.assertj.core.api.Assertions.assertThat; /** * @author Michael Minella * @author Glenn Renfro */ public class FlatFileItemReaderAutoConfigurationTests { /** * Contents of the file to be read (included here because it's UTF-16). * *
	 * 1@2@3@4@5@six
	 * # This should be ignored
	 * 7@8@9@10@11@twelve
	 * $ So should this
	 * 13@14@15@16@17@eighteen
	 * 19@20@21@22@23@%twenty four%
	 * 15@26@27@28@29@thirty
	 * 31@32@33@34@35@thirty six
	 * 37@38@39@40@41@forty two
	 * 43@44@45@46@47@forty eight
	 * 49@50@51@52@53@fifty four
	 * 55@56@57@58@59@sixty
	 * 
*/ @Test public void testFullDelimitedConfiguration() { BigInteger tokenizerValidator = BigInteger.ZERO; tokenizerValidator = tokenizerValidator.flipBit(0); ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() .withUserConfiguration(JobConfiguration.class) .withConfiguration(AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class, BatchAutoConfiguration.class, SingleStepJobAutoConfiguration.class, FlatFileItemReaderAutoConfiguration.class, DataSourceAutoConfiguration.class)) .withPropertyValues("spring.batch.job.jobName=job", "spring.batch.job.stepName=step1", "spring.batch.job.chunkSize=5", "spring.batch.job.flatfileitemreader.savestate=true", "spring.batch.job.flatfileitemreader.name=fullDelimitedConfiguration", "spring.batch.job.flatfileitemreader.maxItemCount=5", "spring.batch.job.flatfileitemreader.currentItemCount=2", "spring.batch.job.flatfileitemreader.comments=#,$", "spring.batch.job.flatfileitemreader.resource=/testUTF16.csv", "spring.batch.job.flatfileitemreader.strict=true", "spring.batch.job.flatfileitemreader.encoding=UTF-16", "spring.batch.job.flatfileitemreader.linesToSkip=1", "spring.batch.job.flatfileitemreader.delimited=true", "spring.batch.job.flatfileitemreader.delimiter=@", "spring.batch.job.flatfileitemreader.quoteCharacter=%", "spring.batch.job.flatfileitemreader.includedFields=1,3,5", "spring.batch.job.flatfileitemreader.names=foo,bar,baz", "spring.batch.job.flatfileitemreader.parsingStrict=false"); applicationContextRunner.run((context) -> { JobOperator jobOperator = context.getBean(JobOperator.class); Job job = context.getBean(Job.class); ListItemWriter itemWriter = context.getBean(ListItemWriter.class); JobExecution jobExecution = jobOperator.start(job, new JobParameters()); JobRepository jobRepository = context.getBean(JobRepository.class); while (jobRepository.getJobExecution(jobExecution.getId()).isRunning()) { Thread.sleep(1000); } List writtenItems = itemWriter.getWrittenItems(); 
assertThat(writtenItems.size()).isEqualTo(3); assertThat(((Map) writtenItems.get(0)).get("foo")).isEqualTo("20"); assertThat(((Map) writtenItems.get(0)).get("bar")).isEqualTo("22"); assertThat(((Map) writtenItems.get(0)).get("baz")).isEqualTo("twenty four"); assertThat(((Map) writtenItems.get(1)).get("foo")).isEqualTo("26"); assertThat(((Map) writtenItems.get(1)).get("bar")).isEqualTo("28"); assertThat(((Map) writtenItems.get(1)).get("baz")).isEqualTo("thirty"); assertThat(((Map) writtenItems.get(2)).get("foo")).isEqualTo("32"); assertThat(((Map) writtenItems.get(2)).get("bar")).isEqualTo("34"); assertThat(((Map) writtenItems.get(2)).get("baz")).isEqualTo("thirty six"); }); } @Test public void testFixedWidthConfiguration() { ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() .withUserConfiguration(JobConfiguration.class) .withConfiguration(AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class, BatchAutoConfiguration.class, SingleStepJobAutoConfiguration.class, FlatFileItemReaderAutoConfiguration.class, RangeConverter.class, DataSourceAutoConfiguration.class)) .withPropertyValues("spring.batch.job.jobName=job", "spring.batch.job.stepName=step1", "spring.batch.job.chunkSize=5", "spring.batch.job.flatfileitemreader.savestate=true", "spring.batch.job.flatfileitemreader.name=fixedWidthConfiguration", "spring.batch.job.flatfileitemreader.comments=#,$", "spring.batch.job.flatfileitemreader.resource=/test.txt", "spring.batch.job.flatfileitemreader.strict=true", "spring.batch.job.flatfileitemreader.fixedLength=true", "spring.batch.job.flatfileitemreader.ranges=3-4,7-8,11", "spring.batch.job.flatfileitemreader.names=foo,bar,baz", "spring.batch.job.flatfileitemreader.parsingStrict=false"); applicationContextRunner.run((context) -> { JobOperator jobOperator = context.getBean(JobOperator.class); Job job = context.getBean(Job.class); ListItemWriter itemWriter = context.getBean(ListItemWriter.class); JobExecution jobExecution = 
jobOperator.start(job, new JobParameters()); JobRepository jobRepository = context.getBean(JobRepository.class); while (jobRepository.getJobExecution(jobExecution.getId()).isRunning()) { Thread.sleep(1000); } List writtenItems = itemWriter.getWrittenItems(); assertThat(writtenItems.size()).isEqualTo(6); assertThat(((Map) writtenItems.get(0)).get("foo")).isEqualTo("2"); assertThat(((Map) writtenItems.get(0)).get("bar")).isEqualTo("4"); assertThat(((Map) writtenItems.get(0)).get("baz")).isEqualTo("six"); assertThat(((Map) writtenItems.get(1)).get("foo")).isEqualTo("8"); assertThat(((Map) writtenItems.get(1)).get("bar")).isEqualTo("10"); assertThat(((Map) writtenItems.get(1)).get("baz")).isEqualTo("twelve"); assertThat(((Map) writtenItems.get(2)).get("foo")).isEqualTo("14"); assertThat(((Map) writtenItems.get(2)).get("bar")).isEqualTo("16"); assertThat(((Map) writtenItems.get(2)).get("baz")).isEqualTo("eighteen"); assertThat(((Map) writtenItems.get(3)).get("foo")).isEqualTo("20"); assertThat(((Map) writtenItems.get(3)).get("bar")).isEqualTo("22"); assertThat(((Map) writtenItems.get(3)).get("baz")).isEqualTo("twenty four"); assertThat(((Map) writtenItems.get(4)).get("foo")).isEqualTo("26"); assertThat(((Map) writtenItems.get(4)).get("bar")).isEqualTo("28"); assertThat(((Map) writtenItems.get(4)).get("baz")).isEqualTo("thirty"); assertThat(((Map) writtenItems.get(5)).get("foo")).isEqualTo("32"); assertThat(((Map) writtenItems.get(5)).get("bar")).isEqualTo("34"); assertThat(((Map) writtenItems.get(5)).get("baz")).isEqualTo("thirty six"); }); } @Test public void testCustomLineMapper() { ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() .withUserConfiguration(CustomLineMapperConfiguration.class) .withConfiguration(AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class, BatchAutoConfiguration.class, SingleStepJobAutoConfiguration.class, FlatFileItemReaderAutoConfiguration.class, DataSourceAutoConfiguration.class)) 
.withPropertyValues("spring.batch.job.jobName=job", "spring.batch.job.stepName=step1", "spring.batch.job.chunkSize=5", "spring.batch.job.flatfileitemreader.name=fixedWidthConfiguration", "spring.batch.job.flatfileitemreader.resource=/test.txt", "spring.batch.job.flatfileitemreader.strict=true"); applicationContextRunner.run((context) -> { JobOperator jobOperator = context.getBean(JobOperator.class); Job job = context.getBean(Job.class); ListItemWriter itemWriter = context.getBean(ListItemWriter.class); JobExecution jobExecution = jobOperator.start(job, new JobParameters()); JobRepository jobRepository = context.getBean(JobRepository.class); while (jobRepository.getJobExecution(jobExecution.getId()).isRunning()) { Thread.sleep(1000); } List writtenItems = itemWriter.getWrittenItems(); assertThat(writtenItems.size()).isEqualTo(8); }); } /** * This test requires an input file with an even number of records. */ @Test public void testCustomRecordSeparatorAndSkippedLines() { ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() .withUserConfiguration(RecordSeparatorAndSkippedLinesJobConfiguration.class) .withConfiguration(AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class, BatchAutoConfiguration.class, SingleStepJobAutoConfiguration.class, FlatFileItemReaderAutoConfiguration.class, RangeConverter.class, DataSourceAutoConfiguration.class)) .withPropertyValues("spring.batch.job.jobName=job", "spring.batch.job.stepName=step1", "spring.batch.job.chunkSize=5", "spring.batch.job.flatfileitemreader.name=fixedWidthConfiguration", "spring.batch.job.flatfileitemreader.resource=/test.txt", "spring.batch.job.flatfileitemreader.linesToSkip=2", "spring.batch.job.flatfileitemreader.fixedLength=true", "spring.batch.job.flatfileitemreader.ranges=3-4,7-8,11", "spring.batch.job.flatfileitemreader.names=foo,bar,baz", "spring.batch.job.flatfileitemreader.strict=true"); applicationContextRunner.run((context) -> { JobOperator jobOperator = 
context.getBean(JobOperator.class); Job job = context.getBean(Job.class); ListItemWriter itemWriter = context.getBean(ListItemWriter.class); JobExecution jobExecution = jobOperator.start(job, new JobParameters()); JobRepository jobRepository = context.getBean(JobRepository.class); while (jobRepository.getJobExecution(jobExecution.getId()).isRunning()) { Thread.sleep(1000); } ListLineCallbackHandler callbackHandler = context.getBean(ListLineCallbackHandler.class); assertThat(callbackHandler.getLines().size()).isEqualTo(2); List writtenItems = itemWriter.getWrittenItems(); assertThat(writtenItems.size()).isEqualTo(2); }); } @Test public void testCustomMapping() { ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() .withUserConfiguration(CustomMappingConfiguration.class) .withConfiguration(AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class, BatchAutoConfiguration.class, SingleStepJobAutoConfiguration.class, FlatFileItemReaderAutoConfiguration.class, RangeConverter.class, DataSourceAutoConfiguration.class)) .withPropertyValues("spring.batch.job.jobName=job", "spring.batch.job.stepName=step1", "spring.batch.job.chunkSize=5", "spring.batch.job.flatfileitemreader.name=fixedWidthConfiguration", "spring.batch.job.flatfileitemreader.resource=/test.txt", "spring.batch.job.flatfileitemreader.maxItemCount=1", "spring.batch.job.flatfileitemreader.strict=true"); applicationContextRunner.run((context) -> { JobOperator jobOperator = context.getBean(JobOperator.class); Job job = context.getBean(Job.class); ListItemWriter itemWriter = context.getBean(ListItemWriter.class); JobExecution jobExecution = jobOperator.start(job, new JobParameters()); JobRepository jobRepository = context.getBean(JobRepository.class); while (jobRepository.getJobExecution(jobExecution.getId()).isRunning()) { Thread.sleep(1000); } List> writtenItems = itemWriter.getWrittenItems(); assertThat(writtenItems.size()).isEqualTo(1); 
assertThat(writtenItems.get(0).get("one")).isEqualTo("1 2 3"); assertThat(writtenItems.get(0).get("two")).isEqualTo("4 5 six"); }); } @Configuration public static class CustomMappingConfiguration { @Bean public PlatformTransactionManager platformTransactionManager() { return new ResourcelessTransactionManager(); } @Bean public ListItemWriter> itemWriter() { return new ListItemWriter<>(); } @Bean public LineTokenizer lineTokenizer() { return line -> new DefaultFieldSet(new String[] { line.substring(0, 5), line.substring(6) }, new String[] { "one", "two" }); } @Bean public FieldSetMapper> fieldSetMapper() { return fieldSet -> new HashMap((Map) fieldSet.getProperties()); } } @Configuration public static class JobConfiguration { @Bean public PlatformTransactionManager platformTransactionManager() { return new ResourcelessTransactionManager(); } @Bean public ListItemWriter> itemWriter() { return new ListItemWriter<>(); } } @Configuration public static class RecordSeparatorAndSkippedLinesJobConfiguration { @Bean public PlatformTransactionManager platformTransactionManager() { return new ResourcelessTransactionManager(); } @Bean public RecordSeparatorPolicy recordSeparatorPolicy() { return new RecordSeparatorPolicy() { @Override public boolean isEndOfRecord(String record) { boolean endOfRecord = false; int index = record.indexOf('\n'); if (index > 0 && record.length() > index + 1) { endOfRecord = true; } return endOfRecord; } @Override public String postProcess(String record) { return record; } @Override public String preProcess(String record) { return record + '\n'; } }; } @Bean public LineCallbackHandler lineCallbackHandler() { return new ListLineCallbackHandler(); } @Bean public ListItemWriter> itemWriter() { return new ListItemWriter<>(); } } @Configuration public static class CustomLineMapperConfiguration { @Bean public PlatformTransactionManager platformTransactionManager() { return new ResourcelessTransactionManager(); } @Bean public LineMapper> lineMapper() { 
return (line, lineNumber) -> { Map item = new HashMap<>(1); item.put("line", line); item.put("lineNumber", lineNumber); return item; }; } @Bean public ListItemWriter> itemWriter() { return new ListItemWriter<>(); } } public static class ListLineCallbackHandler implements LineCallbackHandler { private List lines = new ArrayList<>(); @Override public void handleLine(String line) { lines.add(line); } public List getLines() { return lines; } } } ================================================ FILE: spring-cloud-starter-single-step-batch-job/src/test/java/org/springframework/cloud/task/batch/autoconfigure/flatfile/FlatFileItemWriterAutoConfigurationTests.java ================================================ /* * Copyright 2020-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.springframework.cloud.task.batch.autoconfigure.flatfile; import java.io.File; import java.io.InputStreamReader; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; import org.assertj.core.api.Assertions; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.springframework.batch.core.job.Job; import org.springframework.batch.core.job.JobExecution; import org.springframework.batch.core.job.parameters.JobParameters; import org.springframework.batch.core.launch.JobOperator; import org.springframework.batch.core.repository.JobRepository; import org.springframework.batch.infrastructure.item.file.FlatFileFooterCallback; import org.springframework.batch.infrastructure.item.file.FlatFileHeaderCallback; import org.springframework.batch.infrastructure.item.file.FlatFileItemWriter; import org.springframework.batch.infrastructure.item.file.transform.FieldExtractor; import org.springframework.batch.infrastructure.item.file.transform.LineAggregator; import org.springframework.batch.infrastructure.item.file.transform.PassThroughLineAggregator; import org.springframework.batch.infrastructure.item.support.ListItemReader; import org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager; import org.springframework.boot.autoconfigure.AutoConfigurations; import org.springframework.boot.autoconfigure.context.PropertyPlaceholderAutoConfiguration; import org.springframework.boot.batch.autoconfigure.BatchAutoConfiguration; import org.springframework.boot.jdbc.autoconfigure.DataSourceAutoConfiguration; import org.springframework.boot.test.context.runner.ApplicationContextRunner; import org.springframework.cloud.task.batch.autoconfigure.SingleStepJobAutoConfiguration; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; 
import org.springframework.core.io.ClassPathResource; import org.springframework.core.io.FileSystemResource; import org.springframework.test.util.ReflectionTestUtils; import org.springframework.transaction.PlatformTransactionManager; import org.springframework.util.FileCopyUtils; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.fail; /** * @author Michael Minella */ public class FlatFileItemWriterAutoConfigurationTests { private File outputFile; @BeforeEach public void setUp() throws Exception { this.outputFile = File.createTempFile("flatfile-config-test-output", ".tmp"); } @AfterEach public void tearDown() { this.outputFile.delete(); } @Test public void testValidation() { FlatFileItemWriterProperties properties = new FlatFileItemWriterProperties(); properties.setFormatted(true); properties.setDelimited(true); FlatFileItemWriterAutoConfiguration configuration = new FlatFileItemWriterAutoConfiguration(properties); try { configuration.itemWriter(); fail("Exception should have been thrown when both formatted and delimited are selected"); } catch (IllegalStateException ise) { assertThat(ise.getMessage()).isEqualTo("An output file must be either delimited or formatted or a custom " + "LineAggregator must be provided. 
Your current configuration specifies both delimited and formatted"); } catch (Exception e) { fail("Incorrect exception thrown", e); } properties.setFormatted(true); properties.setDelimited(false); ReflectionTestUtils.setField(configuration, "lineAggregator", new PassThroughLineAggregator<>()); try { configuration.itemWriter(); fail("Exception should have been thrown when a LineAggregator and one of the autocreated options are selected"); } catch (IllegalStateException ise) { assertThat(ise.getMessage()) .isEqualTo("A LineAggregator must be configured if the " + "output is not formatted or delimited"); } catch (Exception e) { fail("Incorrect exception thrown", e); } properties.setFormatted(false); properties.setDelimited(true); try { configuration.itemWriter(); fail("Exception should have been thrown when a LineAggregator and one of the autocreated options are selected"); } catch (IllegalStateException ise) { assertThat(ise.getMessage()) .isEqualTo("A LineAggregator must be configured if the " + "output is not formatted or delimited"); } catch (Exception e) { fail("Incorrect exception thrown", e); } } @Test public void testDelimitedFileGeneration() { ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() .withUserConfiguration(DelimitedJobConfiguration.class) .withConfiguration(AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class, BatchAutoConfiguration.class, SingleStepJobAutoConfiguration.class, FlatFileItemWriterAutoConfiguration.class, DataSourceAutoConfiguration.class)) .withPropertyValues("spring.batch.job.jobName=job", "spring.batch.job.stepName=step1", "spring.batch.job.chunkSize=5", "spring.batch.job.flatfileitemwriter.name=fooWriter", String.format("spring.batch.job.flatfileitemwriter.resource=file://%s", this.outputFile.getAbsolutePath()), "spring.batch.job.flatfileitemwriter.encoding=UTF-16", "spring.batch.job.flatfileitemwriter.saveState=false", "spring.batch.job.flatfileitemwriter.shouldDeleteIfEmpty=true", 
"spring.batch.job.flatfileitemwriter.delimited=true", "spring.batch.job.flatfileitemwriter.names=item", "spring.batch.job.flatfileitemwriter.append=true", "spring.batch.job.flatfileitemwriter.forceSync=true", "spring.batch.job.flatfileitemwriter.shouldDeleteIfExists=false", "spring.batch.job.flatfileitemwriter.transactional=false"); applicationContextRunner.run((context) -> { JobOperator jobOperator = context.getBean(JobOperator.class); Job job = context.getBean(Job.class); JobExecution jobExecution = jobOperator.start(job, new JobParameters()); JobRepository jobRepository = context.getBean(JobRepository.class); while (jobRepository.getJobExecution(jobExecution.getId()).isRunning()) { Thread.sleep(1000); } FlatFileItemWriter writer = context.getBean(FlatFileItemWriter.class); assertThat(Assertions.linesOf(this.outputFile, StandardCharsets.UTF_16).size()).isEqualTo(3); assertThat(Assertions.contentOf((new ClassPathResource("writerTestUTF16.txt")).getFile()) .equals(new FileSystemResource(this.outputFile))); assertThat((Boolean) ReflectionTestUtils.getField(writer, "saveState")).isFalse(); assertThat((Boolean) ReflectionTestUtils.getField(writer, "append")).isTrue(); assertThat((Boolean) ReflectionTestUtils.getField(writer, "forceSync")).isTrue(); assertThat((Boolean) ReflectionTestUtils.getField(writer, "shouldDeleteIfExists")).isFalse(); assertThat((Boolean) ReflectionTestUtils.getField(writer, "transactional")).isFalse(); }); } @Test public void testFormattedFileGeneration() { ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() .withUserConfiguration(FormattedJobConfiguration.class) .withConfiguration(AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class, BatchAutoConfiguration.class, SingleStepJobAutoConfiguration.class, FlatFileItemWriterAutoConfiguration.class, DataSourceAutoConfiguration.class)) .withPropertyValues("spring.batch.job.jobName=job", "spring.batch.job.stepName=step1", "spring.batch.job.chunkSize=2", 
"spring.batch.job.flatfileitemwriter.name=fooWriter", String.format("spring.batch.job.flatfileitemwriter.resource=file://%s", this.outputFile.getAbsolutePath()), "spring.batch.job.flatfileitemwriter.encoding=UTF-8", "spring.batch.job.flatfileitemwriter.formatted=true", "spring.batch.job.flatfileitemwriter.names=item", "spring.batch.job.flatfileitemwriter.format=item = %s", "spring.batch.job.flatfileitemwriter.minimumLength=8", "spring.batch.job.flatfileitemwriter.maximumLength=10"); applicationContextRunner.run((context) -> { JobOperator jobOperator = context.getBean(JobOperator.class); Job job = context.getBean(Job.class); JobExecution jobExecution = jobOperator.start(job, new JobParameters()); JobRepository jobRepository = context.getBean(JobRepository.class); while (jobRepository.getJobExecution(jobExecution.getId()).isRunning()) { Thread.sleep(1000); } assertThat(Assertions.linesOf(this.outputFile).size()).isEqualTo(2); String results = FileCopyUtils .copyToString(new InputStreamReader(new FileSystemResource(this.outputFile).getInputStream())); assertThat(results).isEqualTo("item = foo\nitem = bar\n"); }); } @Test public void testFormattedFieldExtractorFileGeneration() { ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() .withUserConfiguration(FormattedFieldExtractorJobConfiguration.class) .withConfiguration(AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class, BatchAutoConfiguration.class, SingleStepJobAutoConfiguration.class, FlatFileItemWriterAutoConfiguration.class, DataSourceAutoConfiguration.class)) .withPropertyValues("spring.batch.job.jobName=job", "spring.batch.job.stepName=step1", "spring.batch.job.chunkSize=5", "spring.batch.job.flatfileitemwriter.name=fooWriter", String.format("spring.batch.job.flatfileitemwriter.resource=file://%s", this.outputFile.getAbsolutePath()), "spring.batch.job.flatfileitemwriter.encoding=UTF-8", "spring.batch.job.flatfileitemwriter.formatted=true", 
"spring.batch.job.flatfileitemwriter.names=item", "spring.batch.job.flatfileitemwriter.format=item = %s"); applicationContextRunner.run((context) -> { JobOperator jobOperator = context.getBean(JobOperator.class); Job job = context.getBean(Job.class); JobExecution jobExecution = jobOperator.start(job, new JobParameters()); JobRepository jobRepository = context.getBean(JobRepository.class); while (jobRepository.getJobExecution(jobExecution.getId()).isRunning()) { Thread.sleep(1000); } assertThat(Assertions.linesOf(this.outputFile).size()).isEqualTo(3); String results = FileCopyUtils .copyToString(new InputStreamReader(new FileSystemResource(this.outputFile).getInputStream())); assertThat(results).isEqualTo("item = f\nitem = b\nitem = b\n"); }); } @Test public void testFieldExtractorFileGeneration() { ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() .withUserConfiguration(FieldExtractorConfiguration.class) .withConfiguration(AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class, BatchAutoConfiguration.class, SingleStepJobAutoConfiguration.class, FlatFileItemWriterAutoConfiguration.class, DataSourceAutoConfiguration.class)) .withPropertyValues("spring.batch.job.jobName=job", "spring.batch.job.stepName=step1", "spring.batch.job.chunkSize=5", "spring.batch.job.flatfileitemwriter.name=fooWriter", String.format("spring.batch.job.flatfileitemwriter.resource=file://%s", this.outputFile.getAbsolutePath()), "spring.batch.job.flatfileitemwriter.encoding=UTF-8", "spring.batch.job.flatfileitemwriter.delimited=true"); applicationContextRunner.run((context) -> { JobOperator jobOperator = context.getBean(JobOperator.class); Job job = context.getBean(Job.class); JobExecution jobExecution = jobOperator.start(job, new JobParameters()); JobRepository jobRepository = context.getBean(JobRepository.class); while (jobRepository.getJobExecution(jobExecution.getId()).isRunning()) { Thread.sleep(1000); } 
assertThat(Assertions.linesOf(this.outputFile, StandardCharsets.UTF_8).size()).isEqualTo(3); String results = FileCopyUtils .copyToString(new InputStreamReader(new FileSystemResource(this.outputFile).getInputStream())); assertThat(results).isEqualTo("f\nb\nb\n"); }); } @Test public void testCustomLineAggregatorFileGeneration() { ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() .withUserConfiguration(LineAggregatorConfiguration.class) .withConfiguration(AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class, BatchAutoConfiguration.class, SingleStepJobAutoConfiguration.class, FlatFileItemWriterAutoConfiguration.class, DataSourceAutoConfiguration.class)) .withPropertyValues("spring.batch.job.jobName=job", "spring.batch.job.stepName=step1", "spring.batch.job.chunkSize=5", "spring.batch.job.flatfileitemwriter.name=fooWriter", String.format("spring.batch.job.flatfileitemwriter.resource=file://%s", this.outputFile.getAbsolutePath()), "spring.batch.job.flatfileitemwriter.encoding=UTF-8"); applicationContextRunner.run((context) -> { JobOperator jobOperator = context.getBean(JobOperator.class); Job job = context.getBean(Job.class); JobExecution jobExecution = jobOperator.start(job, new JobParameters()); JobRepository jobRepository = context.getBean(JobRepository.class); while (jobRepository.getJobExecution(jobExecution.getId()).isRunning()) { Thread.sleep(1000); } assertThat(Assertions.linesOf(this.outputFile, StandardCharsets.UTF_8).size()).isEqualTo(3); String results = FileCopyUtils .copyToString(new InputStreamReader(new FileSystemResource(this.outputFile).getInputStream())); assertThat(results).isEqualTo("{item=foo}\n{item=bar}\n{item=baz}\n"); }); } @Test public void testHeaderFooterFileGeneration() { ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() .withUserConfiguration(HeaderFooterConfiguration.class) .withConfiguration(AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class, 
BatchAutoConfiguration.class, SingleStepJobAutoConfiguration.class, FlatFileItemWriterAutoConfiguration.class, DataSourceAutoConfiguration.class)) .withPropertyValues("spring.batch.job.jobName=job", "spring.batch.job.stepName=step1", "spring.batch.job.chunkSize=5", "spring.batch.job.flatfileitemwriter.name=fooWriter", String.format("spring.batch.job.flatfileitemwriter.resource=file://%s", this.outputFile.getAbsolutePath()), "spring.batch.job.flatfileitemwriter.encoding=UTF-8", "spring.batch.job.flatfileitemwriter.delimited=true", "spring.batch.job.flatfileitemwriter.names=item"); applicationContextRunner.run((context) -> { JobOperator jobOperator = context.getBean(JobOperator.class); Job job = context.getBean(Job.class); JobExecution jobExecution = jobOperator.start(job, new JobParameters()); JobRepository jobRepository = context.getBean(JobRepository.class); while (jobRepository.getJobExecution(jobExecution.getId()).isRunning()) { Thread.sleep(1000); } assertThat(Assertions.linesOf(this.outputFile, StandardCharsets.UTF_8).size()).isEqualTo(5); String results = FileCopyUtils .copyToString(new InputStreamReader(new FileSystemResource(this.outputFile).getInputStream())); assertThat(results).isEqualTo("header\nfoo\nbar\nbaz\nfooter"); }); } @Configuration public static class DelimitedJobConfiguration { @Bean public PlatformTransactionManager platformTransactionManager() { return new ResourcelessTransactionManager(); } @Bean public ListItemReader> itemReader() { List> items = new ArrayList<>(3); items.add(Collections.singletonMap("item", "foo")); items.add(Collections.singletonMap("item", "bar")); items.add(Collections.singletonMap("item", "baz")); return new ListItemReader<>(items); } } @Configuration public static class LineAggregatorConfiguration { @Bean public PlatformTransactionManager platformTransactionManager() { return new ResourcelessTransactionManager(); } @Bean public ListItemReader> itemReader() { List> items = new ArrayList<>(3); 
items.add(Collections.singletonMap("item", "foo")); items.add(Collections.singletonMap("item", "bar")); items.add(Collections.singletonMap("item", "baz")); return new ListItemReader<>(items); } @Bean public LineAggregator> lineAggregator() { return new PassThroughLineAggregator<>(); } } @Configuration public static class HeaderFooterConfiguration { @Bean public PlatformTransactionManager platformTransactionManager() { return new ResourcelessTransactionManager(); } @Bean public ListItemReader> itemReader() { List> items = new ArrayList<>(3); items.add(Collections.singletonMap("item", "foo")); items.add(Collections.singletonMap("item", "bar")); items.add(Collections.singletonMap("item", "baz")); return new ListItemReader<>(items); } @Bean public FlatFileHeaderCallback headerCallback() { return writer -> writer.append("header"); } @Bean public FlatFileFooterCallback footerCallback() { return writer -> writer.append("footer"); } } @Configuration public static class FieldExtractorConfiguration { @Bean public PlatformTransactionManager platformTransactionManager() { return new ResourcelessTransactionManager(); } @Bean public ListItemReader> itemReader() { List> items = new ArrayList<>(3); items.add(Collections.singletonMap("item", "foo")); items.add(Collections.singletonMap("item", "bar")); items.add(Collections.singletonMap("item", "baz")); return new ListItemReader<>(items); } @Bean public FieldExtractor> lineAggregator() { return item -> { List fields = new ArrayList<>(1); fields.add(((String) item.get("item")).substring(0, 1)); return fields.toArray(); }; } } @Configuration public static class FormattedJobConfiguration { @Bean public PlatformTransactionManager platformTransactionManager() { return new ResourcelessTransactionManager(); } @Bean public ListItemReader> itemReader() { List> items = new ArrayList<>(3); items.add(Collections.singletonMap("item", "foo")); items.add(Collections.singletonMap("item", "bar")); items.add(Collections.singletonMap("item", 
"tooLong")); return new ListItemReader<>(items); } } @Configuration public static class FormattedFieldExtractorJobConfiguration { @Bean public PlatformTransactionManager platformTransactionManager() { return new ResourcelessTransactionManager(); } @Bean public FieldExtractor> lineAggregator() { return item -> { List fields = new ArrayList<>(1); fields.add(((String) item.get("item")).substring(0, 1)); return fields.toArray(); }; } @Bean public ListItemReader> itemReader() { List> items = new ArrayList<>(3); items.add(Collections.singletonMap("item", "foo")); items.add(Collections.singletonMap("item", "bar")); items.add(Collections.singletonMap("item", "baz")); return new ListItemReader<>(items); } } } ================================================ FILE: spring-cloud-starter-single-step-batch-job/src/test/java/org/springframework/cloud/task/batch/autoconfigure/jdbc/JdbcBatchItemWriterAutoConfigurationTests.java ================================================ /* * Copyright 2020-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.springframework.cloud.task.batch.autoconfigure.jdbc; import java.sql.PreparedStatement; import java.sql.SQLException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.sql.DataSource; import org.h2.tools.Server; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Test; import org.springframework.batch.core.job.Job; import org.springframework.batch.core.job.JobExecution; import org.springframework.batch.core.job.parameters.JobParameters; import org.springframework.batch.core.launch.JobOperator; import org.springframework.batch.core.repository.JobRepository; import org.springframework.batch.infrastructure.item.database.ItemPreparedStatementSetter; import org.springframework.batch.infrastructure.item.database.ItemSqlParameterSourceProvider; import org.springframework.batch.infrastructure.item.database.JdbcBatchItemWriter; import org.springframework.batch.infrastructure.item.support.ListItemReader; import org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager; import org.springframework.beans.factory.NoSuchBeanDefinitionException; import org.springframework.boot.autoconfigure.AutoConfigurations; import org.springframework.boot.autoconfigure.context.PropertyPlaceholderAutoConfiguration; import org.springframework.boot.batch.autoconfigure.BatchAutoConfiguration; import org.springframework.boot.test.context.runner.ApplicationContextRunner; import org.springframework.cloud.task.batch.autoconfigure.SingleStepJobAutoConfiguration; import org.springframework.context.ApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.core.io.ClassPathResource; import org.springframework.jdbc.BadSqlGrammarException; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.jdbc.core.SqlTypeValue; import 
org.springframework.jdbc.core.StatementCreatorUtils;
import org.springframework.jdbc.core.namedparam.MapSqlParameterSource;
import org.springframework.jdbc.datasource.DriverManagerDataSource;
import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator;
import org.springframework.test.util.ReflectionTestUtils;
import org.springframework.test.util.TestSocketUtils;
import org.springframework.transaction.PlatformTransactionManager;

import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;

public class JdbcBatchItemWriterAutoConfigurationTests {

	// Built in the static initializer below; H2 listens on a random TCP port so
	// concurrent builds do not collide.
	private final static String DATASOURCE_URL;

	private final static String DATASOURCE_USER_NAME = "SA";

	// NOTE(review): the password literally contains two quote characters — confirm
	// this is intentional for the H2 TCP server setup.
	private final static String DATASOURCE_USER_PASSWORD = "''";

	private final static String DATASOURCE_DRIVER_CLASS_NAME = "org.h2.Driver";

	private static int randomPort;

	static {
		randomPort = TestSocketUtils.findAvailableTcpPort();
		DATASOURCE_URL = "jdbc:h2:tcp://localhost:" + randomPort + "/mem:dataflow;DB_CLOSE_DELAY=-1;"
				+ "DB_CLOSE_ON_EXIT=FALSE";
	}

	/**
	 * Truncates the item table and clears the Spring Batch metadata tables between
	 * tests. A {@link BadSqlGrammarException} means the metadata tables were never
	 * created for that test, so it is deliberately ignored.
	 */
	@AfterEach
	public void clearDB() {
		DriverManagerDataSource dataSource = new DriverManagerDataSource();
		dataSource.setDriverClassName(DATASOURCE_DRIVER_CLASS_NAME);
		dataSource.setUrl(DATASOURCE_URL);
		dataSource.setUsername(DATASOURCE_USER_NAME);
		dataSource.setPassword(DATASOURCE_USER_PASSWORD);
		JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource);
		jdbcTemplate.execute("TRUNCATE TABLE item");
		try {
			jdbcTemplate.execute("DELETE FROM BATCH_STEP_EXECUTION_CONTEXT");
			jdbcTemplate.execute("DELETE FROM BATCH_STEP_EXECUTION");
			jdbcTemplate.execute("DELETE FROM BATCH_JOB_EXECUTION_PARAMS");
			jdbcTemplate.execute("DELETE FROM BATCH_JOB_EXECUTION_CONTEXT");
			jdbcTemplate.execute("DELETE FROM BATCH_JOB_EXECUTION");
			jdbcTemplate.execute("DELETE FROM BATCH_JOB_INSTANCE");
		}
		catch (BadSqlGrammarException e) {
			System.out.println("No tables to cleanup");
		}
	}

	/**
	 * Happy path using the shared data source; property setup and assertions are
	 * delegated to updatePropertiesForTest/runTest (defined later in this class).
	 */
	@Test
	public void baseTest() {
		ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner()
			.withUserConfiguration(TaskLauncherConfiguration.class,
					JdbcBatchItemWriterAutoConfigurationTests.DelimitedJobConfiguration.class)
			.withConfiguration(AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class,
					BatchAutoConfiguration.class, SingleStepJobAutoConfiguration.class,
					JdbcBatchItemWriterAutoConfiguration.class))
			.withPropertyValues("spring.batch.job.jdbcsinglestep.datasource.enable=false");
		applicationContextRunner = updatePropertiesForTest(applicationContextRunner);
		runTest(applicationContextRunner, false);
	}

	/**
	 * Same as baseTest but the writer gets its own data source configured via the
	 * jdbcbatchitemwriter.datasource.* properties.
	 */
	@Test
	public void baseTestWithWriterDataSource() {
		ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner()
			.withUserConfiguration(TaskLauncherConfiguration.class,
					JdbcBatchItemWriterAutoConfigurationTests.DelimitedJobConfiguration.class)
			.withConfiguration(AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class,
					BatchAutoConfiguration.class, SingleStepJobAutoConfiguration.class,
					JdbcBatchItemWriterAutoConfiguration.class))
			.withPropertyValues("spring.batch.job.jdbcbatchitemwriter.datasource.enable=true",
					"spring.batch.job.jdbcsinglestep.datasource.enable=false", "spring.batch.job.jobName=job",
					"spring.batch.job.stepName=step1", "spring.batch.job.chunkSize=5",
					"spring.batch.job.jdbcbatchitemwriter.name=fooWriter",
					"spring.batch.job.jdbcbatchitemwriter.sql=INSERT INTO item (item_name) VALUES (:item_name)",
					"spring.batch.jdbc.initialize-schema=always",
					"jdbcbatchitemwriter.datasource.url=" + DATASOURCE_URL,
					"jdbcbatchitemwriter.datasource.username=" + DATASOURCE_USER_NAME,
					"jdbcbatchitemwriter.datasource.password=" + DATASOURCE_USER_PASSWORD,
					"jdbcbatchitemwriter.datasource.driverClassName=" + DATASOURCE_DRIVER_CLASS_NAME);
		runTest(applicationContextRunner, true);
	}

	@Test
	public void customSqlParameterSourceTest() {
		ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner()
			.withUserConfiguration(TaskLauncherConfiguration.class,
JdbcBatchItemWriterAutoConfigurationTests.DelimitedDifferentKeyNameJobConfiguration.class, CustomSqlParameterSourceProviderConfiguration.class) .withConfiguration( AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class, BatchAutoConfiguration.class, SingleStepJobAutoConfiguration.class, JdbcBatchItemWriterAutoConfiguration.class)) .withPropertyValues("spring.batch.job.jdbcsinglestep.datasource.enable=false"); applicationContextRunner = updatePropertiesForTest(applicationContextRunner); runTest(applicationContextRunner, false); } @Test public void preparedStatementSetterTest() { ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() .withUserConfiguration(TaskLauncherConfiguration.class, JdbcBatchItemWriterAutoConfigurationTests.DelimitedJobConfiguration.class, ItemPreparedStatementSetterConfiguration.class) .withConfiguration( AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class, BatchAutoConfiguration.class, SingleStepJobAutoConfiguration.class, JdbcBatchItemWriterAutoConfiguration.class)) .withPropertyValues("spring.batch.job.jdbcsinglestep.datasource.enable=false"); applicationContextRunner = updatePropertiesForTest(applicationContextRunner); runTest(applicationContextRunner, false); } private ApplicationContextRunner updatePropertiesForTest(ApplicationContextRunner applicationContextRunner) { return applicationContextRunner.withPropertyValues("spring.batch.job.jobName=job", "spring.batch.job.stepName=step1", "spring.batch.job.chunkSize=5", "spring.batch.job.jdbcbatchitemwriter.name=fooWriter", "spring.batch.job.jdbcbatchitemwriter.sql=INSERT INTO item (item_name) VALUES (:item_name)", "spring.batch.jdbc.initialize-schema=always"); } private void validateResultAndBean(ApplicationContext context, boolean isWriterDataSourcePresent) { DataSource dataSource; try { dataSource = context.getBean(DataSource.class); } catch (NoSuchBeanDefinitionException nde) { dataSource = 
context.getBean("jdbcBatchItemWriterSpringDataSource", DataSource.class); } JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource); List> result = jdbcTemplate.queryForList("SELECT item_name FROM item ORDER BY item_name"); assertThat(result.size()).isEqualTo(3); assertThat(result.get(0).get("item_name")).isEqualTo("bar"); assertThat(result.get(1).get("item_name")).isEqualTo("baz"); assertThat(result.get(2).get("item_name")).isEqualTo("foo"); JdbcBatchItemWriter writer = context.getBean(JdbcBatchItemWriter.class); assertThat((Boolean) ReflectionTestUtils.getField(writer, "assertUpdates")).isTrue(); assertThat((Integer) ReflectionTestUtils.getField(writer, "parameterCount")).isEqualTo(1); assertThat((Boolean) ReflectionTestUtils.getField(writer, "usingNamedParameters")).isTrue(); if (!isWriterDataSourcePresent) { assertThatThrownBy(() -> context.getBean("jdbcBatchItemWriterSpringDataSource")) .isInstanceOf(NoSuchBeanDefinitionException.class) .hasMessageContaining("No bean named 'jdbcBatchItemWriterSpringDataSource' available"); } else { assertThat(context.getBean("jdbcBatchItemWriterSpringDataSource")).isNotNull(); } } private void runTest(ApplicationContextRunner applicationContextRunner, boolean isWriterDataSourcePresent) { applicationContextRunner.run((context) -> { JobOperator jobOperator = context.getBean(JobOperator.class); Job job = context.getBean(Job.class); JobExecution jobExecution = jobOperator.start(job, new JobParameters()); JobRepository jobRepository = context.getBean(JobRepository.class); while (jobRepository.getJobExecution(jobExecution.getId()).isRunning()) { Thread.sleep(1000); } validateResultAndBean(context, isWriterDataSourcePresent); }); } @Configuration public static class TaskLauncherConfiguration { private static Server defaultServer; @Bean public Server initH2TCPServer() { Server server = null; try { if (defaultServer == null) { server = Server .createTcpServer("-ifNotExists", "-tcp", "-tcpAllowOthers", "-tcpPort", 
String.valueOf(randomPort)) .start(); defaultServer = server; DriverManagerDataSource dataSource = new DriverManagerDataSource(); dataSource.setDriverClassName(DATASOURCE_DRIVER_CLASS_NAME); dataSource.setUrl(DATASOURCE_URL); dataSource.setUsername(DATASOURCE_USER_NAME); dataSource.setPassword(DATASOURCE_USER_PASSWORD); ClassPathResource setupResource = new ClassPathResource("schema-h2.sql"); ResourceDatabasePopulator resourceDatabasePopulator = new ResourceDatabasePopulator(setupResource); resourceDatabasePopulator.execute(dataSource); } } catch (SQLException e) { throw new IllegalStateException(e); } return defaultServer; } @Bean public DataSource dataSource(Server server) { DriverManagerDataSource dataSource = new DriverManagerDataSource(); dataSource.setDriverClassName(DATASOURCE_DRIVER_CLASS_NAME); dataSource.setUrl(DATASOURCE_URL); dataSource.setUsername(DATASOURCE_USER_NAME); dataSource.setPassword(DATASOURCE_USER_PASSWORD); return dataSource; } } @Configuration public static class DelimitedJobConfiguration { @Bean public PlatformTransactionManager platformTransactionManager() { return new ResourcelessTransactionManager(); } @Bean public ListItemReader> itemReader() { List> items = new ArrayList<>(3); items.add(Collections.singletonMap("item_name", "foo")); items.add(Collections.singletonMap("item_name", "bar")); items.add(Collections.singletonMap("item_name", "baz")); return new ListItemReader<>(items); } } @Configuration public static class DelimitedDifferentKeyNameJobConfiguration { @Bean public PlatformTransactionManager platformTransactionManager() { return new ResourcelessTransactionManager(); } @Bean public ListItemReader> itemReader() { List> items = new ArrayList<>(3); items.add(Collections.singletonMap("item_foo", "foo")); items.add(Collections.singletonMap("item_foo", "bar")); items.add(Collections.singletonMap("item_foo", "baz")); return new ListItemReader<>(items); } } @Configuration public static class 
CustomSqlParameterSourceProviderConfiguration { @Bean public ItemSqlParameterSourceProvider> itemSqlParameterSourceProvider() { return item -> new MapSqlParameterSource(new HashMap() { { put("item_name", item.get("item_foo")); } }); } } @Configuration public static class ItemPreparedStatementSetterConfiguration { @Bean public ItemPreparedStatementSetter itemPreparedStatementSetter() { return new ItemPreparedStatementSetter() { @Override public void setValues(Object item, PreparedStatement ps) throws SQLException { Map mapItem = (Map) item; StatementCreatorUtils.setParameterValue(ps, 1, SqlTypeValue.TYPE_UNKNOWN, mapItem.get("item_name")); } }; } } } ================================================ FILE: spring-cloud-starter-single-step-batch-job/src/test/java/org/springframework/cloud/task/batch/autoconfigure/jdbc/JdbcCursorItemReaderAutoConfigurationTests.java ================================================ /* * Copyright 2020-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.springframework.cloud.task.batch.autoconfigure.jdbc; import java.sql.SQLException; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.sql.DataSource; import org.h2.tools.Server; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.Test; import org.springframework.batch.core.job.Job; import org.springframework.batch.core.job.JobExecution; import org.springframework.batch.core.job.parameters.JobParameters; import org.springframework.batch.core.launch.JobOperator; import org.springframework.batch.core.repository.JobRepository; import org.springframework.batch.infrastructure.item.database.JdbcCursorItemReader; import org.springframework.batch.infrastructure.item.support.ListItemWriter; import org.springframework.batch.infrastructure.item.util.ExecutionContextUserSupport; import org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager; import org.springframework.beans.factory.NoSuchBeanDefinitionException; import org.springframework.boot.autoconfigure.AutoConfigurations; import org.springframework.boot.autoconfigure.AutoConfigureBefore; import org.springframework.boot.autoconfigure.context.PropertyPlaceholderAutoConfiguration; import org.springframework.boot.batch.autoconfigure.BatchAutoConfiguration; import org.springframework.boot.test.context.runner.ApplicationContextRunner; import org.springframework.cloud.task.batch.autoconfigure.SingleStepJobAutoConfiguration; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.core.io.ClassPathResource; import org.springframework.jdbc.BadSqlGrammarException; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.jdbc.core.RowMapper; import org.springframework.jdbc.datasource.DriverManagerDataSource; import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator; import 
org.springframework.test.util.ReflectionTestUtils; import org.springframework.test.util.TestSocketUtils; import org.springframework.transaction.PlatformTransactionManager; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; /** * @author Michael Minella * @author Glenn Renfro */ public class JdbcCursorItemReaderAutoConfigurationTests { private final static String DATASOURCE_URL; private final static String DATASOURCE_USER_NAME = "SA"; private final static String DATASOURCE_USER_PASSWORD = "''"; private final static String DATASOURCE_DRIVER_CLASS_NAME = "org.h2.Driver"; private static int randomPort; static { randomPort = TestSocketUtils.findAvailableTcpPort(); DATASOURCE_URL = "jdbc:h2:tcp://localhost:" + randomPort + "/mem:dataflow;DB_CLOSE_DELAY=-1;" + "DB_CLOSE_ON_EXIT=FALSE"; } @AfterAll public static void clearDB() { DriverManagerDataSource dataSource = new DriverManagerDataSource(); dataSource.setDriverClassName(DATASOURCE_DRIVER_CLASS_NAME); dataSource.setUrl(DATASOURCE_URL); dataSource.setUsername(DATASOURCE_USER_NAME); dataSource.setPassword(DATASOURCE_USER_PASSWORD); JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource); jdbcTemplate.execute("TRUNCATE TABLE item"); try { jdbcTemplate.execute("DROP TABLE BATCH_JOB_EXECUTION CASCADE"); jdbcTemplate.execute("DROP TABLE BATCH_JOB_INSTANCE CASCADE"); } catch (BadSqlGrammarException e) { System.out.println("No tables to cleanup"); } } @Test public void testIntegration() { ApplicationContextRunner applicationContextRunner = applicationContextRunner().withPropertyValues( "spring.batch.job.jobName=integrationJob", "spring.batch.job.stepName=step1", "spring.batch.job.chunkSize=5", "spring.batch.job.jdbccursoritemreader.name=fooReader", "spring.batch.job.jdbccursoritemreader.sql=select item_name from item", "spring.batch.jdbc.initialize-schema=always", "spring.batch.job.jdbcsinglestep.datasource.enable=false"); 
applicationContextRunner.run((context) -> { JobOperator jobOperator = context.getBean(JobOperator.class); Job job = context.getBean(Job.class); JobExecution jobExecution = jobOperator.start(job, new JobParameters()); JobRepository jobRepository = context.getBean(JobRepository.class); while (jobRepository.getJobExecution(jobExecution.getId()).isRunning()) { Thread.sleep(1000); } List> items = context.getBean(ListItemWriter.class).getWrittenItems(); assertThat(items.size()).isEqualTo(3); assertThat(items.get(0).get("ITEM_NAME")).isEqualTo("foo"); assertThat(items.get(1).get("ITEM_NAME")).isEqualTo("bar"); assertThat(items.get(2).get("ITEM_NAME")).isEqualTo("baz"); assertThatThrownBy(() -> context.getBean("readerSpringDataSource")) .isInstanceOf(NoSuchBeanDefinitionException.class) .hasMessageContaining("No bean named 'readerSpringDataSource' available"); }); } private ApplicationContextRunner applicationContextRunner() { return new ApplicationContextRunner() .withUserConfiguration(TaskLauncherConfiguration.class, BaseConfiguration.class) .withConfiguration( AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class, BatchAutoConfiguration.class, SingleStepJobAutoConfiguration.class, JdbcCursorItemReaderAutoConfiguration.class)); } @Test public void testIntegrationReaderDataSourceEnabled() { ApplicationContextRunner applicationContextRunner = applicationContextRunner().withPropertyValues( "spring.batch.job.jobName=integrationReaderJob", "spring.batch.job.stepName=step1", "spring.batch.job.chunkSize=5", "spring.batch.job.jdbccursoritemreader.name=fooReader", "spring.batch.job.jdbccursoritemreader.sql=select item_name from item", "spring.batch.jdbc.initialize-schema=always", "spring.batch.job.jdbcsinglestep.datasource.enable=false", "spring.batch.job.jdbccursoritemreader.datasource.enable=true", "jdbccursoritemreader.datasource.url=" + DATASOURCE_URL, "jdbccursoritemreader.datasource.username=" + DATASOURCE_USER_NAME, "jdbccursoritemreader.datasource.password=" + 
DATASOURCE_USER_PASSWORD, "jdbccursoritemreader.datasource.driverClassName=" + DATASOURCE_DRIVER_CLASS_NAME); applicationContextRunner.run((context) -> { JobOperator jobOperator = context.getBean(JobOperator.class); Job job = context.getBean(Job.class); JobExecution jobExecution = jobOperator.start(job, new JobParameters()); JobRepository jobRepository = context.getBean(JobRepository.class); while (jobRepository.getJobExecution(jobExecution.getId()).isRunning()) { Thread.sleep(1000); } List> items = context.getBean(ListItemWriter.class).getWrittenItems(); assertThat(items.size()).isEqualTo(3); assertThat(items.get(0).get("ITEM_NAME")).isEqualTo("foo"); assertThat(items.get(1).get("ITEM_NAME")).isEqualTo("bar"); assertThat(items.get(2).get("ITEM_NAME")).isEqualTo("baz"); assertThat(context.getBean("jdbcCursorItemReaderSpringDataSource")).isNotNull(); }); } @Test public void testCustomRowMapper() { ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() .withUserConfiguration(TaskLauncherConfiguration.class, RowMapperConfiguration.class) .withConfiguration( AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class, BatchAutoConfiguration.class, SingleStepJobAutoConfiguration.class, JdbcCursorItemReaderAutoConfiguration.class)) .withPropertyValues("spring.batch.job.jobName=rowMapperJob", "spring.batch.job.stepName=step1", "spring.batch.job.chunkSize=5", "spring.batch.job.jdbccursoritemreader.name=fooReader", "spring.batch.job.jdbccursoritemreader.sql=select * from item", "spring.batch.jdbc.initialize-schema=always", "spring.batch.job.jdbcsinglestep.datasource.enable=false"); applicationContextRunner.run((context) -> { JobOperator jobOperator = context.getBean(JobOperator.class); Job job = context.getBean(Job.class); JobExecution jobExecution = jobOperator.start(job, new JobParameters()); JobRepository jobRepository = context.getBean(JobRepository.class); while (jobRepository.getJobExecution(jobExecution.getId()).isRunning()) { 
Thread.sleep(1000); } List> items = context.getBean(ListItemWriter.class).getWrittenItems(); assertThat(items.size()).isEqualTo(3); assertThat(items.get(0).get("item")).isEqualTo("foo"); assertThat(items.get(1).get("item")).isEqualTo("bar"); assertThat(items.get(2).get("item")).isEqualTo("baz"); }); } @Test public void testRoseyScenario() { final ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() .withUserConfiguration(TaskLauncherConfiguration.class, BaseConfiguration.class) .withConfiguration( AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class, BatchAutoConfiguration.class, SingleStepJobAutoConfiguration.class, JdbcCursorItemReaderAutoConfiguration.class)) .withPropertyValues("spring.batch.job.jobName=roseyJob", "spring.batch.job.stepName=step1", "spring.batch.job.chunkSize=5", "spring.batch.job.jdbccursoritemreader.saveState=false", "spring.batch.job.jdbccursoritemreader.name=fooReader", "spring.batch.job.jdbccursoritemreader.maxItemCount=15", "spring.batch.job.jdbccursoritemreader.currentItemCount=2", "spring.batch.job.jdbccursoritemreader.fetchSize=4", "spring.batch.job.jdbccursoritemreader.maxRows=6", "spring.batch.job.jdbccursoritemreader.queryTimeout=8", "spring.batch.job.jdbccursoritemreader.ignoreWarnings=true", "spring.batch.job.jdbccursoritemreader.verifyCursorPosition=true", "spring.batch.job.jdbccursoritemreader.driverSupportsAbsolute=true", "spring.batch.job.jdbccursoritemreader.useSharedExtendedConnection=true", "spring.batch.job.jdbccursoritemreader.sql=select * from foo"); applicationContextRunner.run((context) -> { JdbcCursorItemReader> itemReader = context.getBean(JdbcCursorItemReader.class); validateBean(itemReader); }); } private void validateBean(JdbcCursorItemReader itemReader) { assertThat(itemReader.getSql()).isEqualTo("select * from foo"); assertThat(itemReader.getDataSource()).isNotNull(); assertThat((Boolean) ReflectionTestUtils.getField(itemReader, "saveState")).isFalse(); 
assertThat(ReflectionTestUtils.getField( (ExecutionContextUserSupport) ReflectionTestUtils.getField(itemReader, "executionContextUserSupport"), "name")) .isEqualTo("fooReader"); assertThat((Integer) ReflectionTestUtils.getField(itemReader, "maxItemCount")).isEqualTo(15); assertThat((Integer) ReflectionTestUtils.getField(itemReader, "currentItemCount")).isEqualTo(2); assertThat((Integer) ReflectionTestUtils.getField(itemReader, "fetchSize")).isEqualTo(4); assertThat((Integer) ReflectionTestUtils.getField(itemReader, "maxRows")).isEqualTo(6); assertThat((Integer) ReflectionTestUtils.getField(itemReader, "queryTimeout")).isEqualTo(8); assertThat((Boolean) ReflectionTestUtils.getField(itemReader, "ignoreWarnings")).isTrue(); assertThat((Boolean) ReflectionTestUtils.getField(itemReader, "verifyCursorPosition")).isTrue(); assertThat((Boolean) ReflectionTestUtils.getField(itemReader, "driverSupportsAbsolute")).isTrue(); assertThat((Boolean) ReflectionTestUtils.getField(itemReader, "useSharedExtendedConnection")).isTrue(); } @Test public void testNoName() { final ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() .withUserConfiguration(TaskLauncherConfiguration.class, BaseConfiguration.class) .withConfiguration( AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class, BatchAutoConfiguration.class, SingleStepJobAutoConfiguration.class, JdbcCursorItemReaderAutoConfiguration.class)) .withPropertyValues("spring.batch.job.jobName=noNameJob", "spring.batch.job.stepName=step1", "spring.batch.job.chunkSize=5"); assertThatThrownBy(() -> { runTest(applicationContextRunner); }).isInstanceOf(IllegalStateException.class).hasMessageContaining("UnsatisfiedDependencyException"); } @Test public void testSqlName() { final ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() .withUserConfiguration(TaskLauncherConfiguration.class, BaseConfiguration.class) .withConfiguration( 
AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class, BatchAutoConfiguration.class, SingleStepJobAutoConfiguration.class, JdbcCursorItemReaderAutoConfiguration.class)) .withPropertyValues("spring.batch.job.jobName=job", "spring.batch.job.stepName=step1", "spring.batch.job.chunkSize=5", "spring.batch.job.jdbccursoritemreader.name=fooReader"); assertThatThrownBy(() -> { runTest(applicationContextRunner); }).isInstanceOf(IllegalStateException.class).hasMessageContaining("UnsatisfiedDependencyException"); } private void runTest(ApplicationContextRunner applicationContextRunner) { applicationContextRunner.run((context) -> { JobOperator jobOperator = context.getBean(JobOperator.class); Job job = context.getBean(Job.class); JobExecution jobExecution = jobOperator.start(job, new JobParameters()); JobRepository jobRepository = context.getBean(JobRepository.class); while (jobRepository.getJobExecution(jobExecution.getId()).isRunning()) { Thread.sleep(1000); } }); } @AutoConfigureBefore({ JdbcCursorItemReaderAutoConfiguration.class, JDBCSingleStepDataSourceAutoConfiguration.class }) @Configuration public static class TaskLauncherConfiguration { @Bean public PlatformTransactionManager platformTransactionManager() { return new ResourcelessTransactionManager(); } private static Server defaultServer; @Bean public Server initH2TCPServer() { Server server = null; try { if (defaultServer == null) { server = Server .createTcpServer("-ifNotExists", "-tcp", "-tcpAllowOthers", "-tcpPort", String.valueOf(randomPort)) .start(); defaultServer = server; DriverManagerDataSource dataSource = new DriverManagerDataSource(); dataSource.setDriverClassName(DATASOURCE_DRIVER_CLASS_NAME); dataSource.setUrl(DATASOURCE_URL); dataSource.setUsername(DATASOURCE_USER_NAME); dataSource.setPassword(DATASOURCE_USER_PASSWORD); ClassPathResource setupResource = new ClassPathResource("schema-h2.sql"); ResourceDatabasePopulator resourceDatabasePopulator = new ResourceDatabasePopulator(setupResource); 
resourceDatabasePopulator.setContinueOnError(true); resourceDatabasePopulator.execute(dataSource); JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource); jdbcTemplate.execute("TRUNCATE TABLE item"); jdbcTemplate.execute("INSERT INTO item VALUES ('foo')"); jdbcTemplate.execute("INSERT INTO item VALUES ('bar')"); jdbcTemplate.execute("INSERT INTO item VALUES ('baz')"); } } catch (SQLException e) { throw new IllegalStateException(e); } return defaultServer; } @Bean public DataSource dataSource(Server server) { DriverManagerDataSource dataSource = new DriverManagerDataSource(); dataSource.setDriverClassName(DATASOURCE_DRIVER_CLASS_NAME); dataSource.setUrl(DATASOURCE_URL); dataSource.setUsername(DATASOURCE_USER_NAME); dataSource.setPassword(DATASOURCE_USER_PASSWORD); return dataSource; } } @Configuration public static class BaseConfiguration { @Bean public ListItemWriter> itemWriter() { return new ListItemWriter<>(); } } @Configuration public static class RowMapperConfiguration { @Bean public RowMapper> rowMapper() { return (rs, rowNum) -> { Map item = new HashMap<>(); item.put("item", rs.getString("item_name")); return item; }; } @Bean public ListItemWriter> itemWriter() { return new ListItemWriter<>(); } } } ================================================ FILE: spring-cloud-starter-single-step-batch-job/src/test/java/org/springframework/cloud/task/batch/autoconfigure/kafka/KafkaItemReaderAutoConfigurationTests.java ================================================ /* * Copyright 2020-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.batch.autoconfigure.kafka; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.function.BiConsumer; import java.util.function.Consumer; import org.apache.kafka.clients.admin.NewTopic; import org.apache.kafka.clients.producer.Producer; import org.apache.kafka.clients.producer.ProducerRecord; import org.apache.kafka.common.serialization.StringSerializer; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; import org.springframework.batch.core.job.Job; import org.springframework.batch.core.job.JobExecution; import org.springframework.batch.core.job.parameters.JobParameters; import org.springframework.batch.core.launch.JobOperator; import org.springframework.batch.core.repository.JobRepository; import org.springframework.batch.infrastructure.item.support.ListItemWriter; import org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager; import org.springframework.boot.autoconfigure.AutoConfigurations; import org.springframework.boot.autoconfigure.context.PropertyPlaceholderAutoConfiguration; import org.springframework.boot.batch.autoconfigure.BatchAutoConfiguration; import org.springframework.boot.jdbc.autoconfigure.DataSourceAutoConfiguration; import org.springframework.boot.ssl.NoSuchSslBundleException; import org.springframework.boot.ssl.SslBundle; import org.springframework.boot.ssl.SslBundles; import org.springframework.boot.test.context.runner.ApplicationContextRunner; import org.springframework.cloud.task.batch.autoconfigure.SingleStepJobAutoConfiguration; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.kafka.core.DefaultKafkaProducerFactory; import 
org.springframework.kafka.support.serializer.JsonDeserializer; import org.springframework.kafka.support.serializer.JsonSerializer; import org.springframework.kafka.test.EmbeddedKafkaBroker; import org.springframework.kafka.test.context.EmbeddedKafka; import org.springframework.kafka.test.utils.KafkaTestUtils; import org.springframework.transaction.PlatformTransactionManager; import static org.assertj.core.api.Assertions.assertThat; @EmbeddedKafka(partitions = 1, topics = { "test" }) public class KafkaItemReaderAutoConfigurationTests { private static EmbeddedKafkaBroker embeddedKafkaBroker; @BeforeAll public static void setupTest(EmbeddedKafkaBroker embeddedKafka) { embeddedKafkaBroker = embeddedKafka; embeddedKafka.addTopics(new NewTopic("topic1", 1, (short) 1), new NewTopic("topic2", 2, (short) 1), new NewTopic("topic3", 1, (short) 1)); } @Test public void testBaseKafkaItemReader() { final String topicName = "topic1"; populateSingleTopic(topicName); ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() .withUserConfiguration(CustomMappingConfiguration.class) .withConfiguration(AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class, BatchAutoConfiguration.class, SingleStepJobAutoConfiguration.class, KafkaItemReaderAutoConfiguration.class, DataSourceAutoConfiguration.class)) .withPropertyValues("spring.batch.job.jobName=job", "spring.batch.job.stepName=step1", "spring.batch.job.chunkSize=5", "spring.kafka.consumer.bootstrap-servers=" + embeddedKafkaBroker.getBrokersAsString(), "spring.kafka.consumer.group-id=1", "spring.batch.job.kafkaitemreader.name=kafkaItemReader", "spring.batch.job.kafkaitemreader.poll-time-out-in-seconds=2", "spring.batch.job.kafkaitemreader.topic=" + topicName, "spring.kafka.consumer.value-deserializer=" + JsonDeserializer.class.getName()); applicationContextRunner.run((context) -> { JobOperator jobOperator = context.getBean(JobOperator.class); Job job = context.getBean(Job.class); ListItemWriter 
itemWriter = context.getBean(ListItemWriter.class); JobExecution jobExecution = jobOperator.start(job, new JobParameters()); JobRepository jobRepository = context.getBean(JobRepository.class); while (jobRepository.getJobExecution(jobExecution.getId()).isRunning()) { Thread.sleep(1000); } List> writtenItems = itemWriter.getWrittenItems(); assertThat(writtenItems.size()).isEqualTo(4); assertThat(writtenItems.get(0).get("first_name")).isEqualTo("jane"); assertThat(writtenItems.get(1).get("first_name")).isEqualTo("john"); assertThat(writtenItems.get(2).get("first_name")).isEqualTo("susan"); assertThat(writtenItems.get(3).get("first_name")).isEqualTo("jim"); }); } @Test public void testBaseKafkaItemReaderMultiplePartitions() { final String topicName = "topic2"; populateSingleTopic(topicName); ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() .withUserConfiguration(CustomMappingConfiguration.class) .withConfiguration(AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class, BatchAutoConfiguration.class, SingleStepJobAutoConfiguration.class, KafkaItemReaderAutoConfiguration.class, DataSourceAutoConfiguration.class)) .withPropertyValues("spring.batch.job.jobName=job", "spring.batch.job.stepName=step1", "spring.batch.job.chunkSize=5", "spring.kafka.consumer.bootstrap-servers=" + embeddedKafkaBroker.getBrokersAsString(), "spring.kafka.consumer.group-id=1", "spring.batch.job.kafkaitemreader.name=kafkaItemReader", "spring.batch.job.kafkaitemreader.partitions=0,1", "spring.batch.job.kafkaitemreader.poll-time-out-in-seconds=2", "spring.batch.job.kafkaitemreader.topic=" + topicName, "spring.kafka.consumer.value-deserializer=" + JsonDeserializer.class.getName()); applicationContextRunner.run((context) -> { JobOperator jobOperator = context.getBean(JobOperator.class); Job job = context.getBean(Job.class); ListItemWriter itemWriter = context.getBean(ListItemWriter.class); JobExecution jobExecution = jobOperator.start(job, new 
JobParameters()); JobRepository jobRepository = context.getBean(JobRepository.class); while (jobRepository.getJobExecution(jobExecution.getId()).isRunning()) { Thread.sleep(1000); } basicValidation(itemWriter); }); } @Test public void testBaseKafkaItemReaderPollTimeoutDefault() { final String topicName = "topic3"; populateSingleTopic(topicName); ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() .withUserConfiguration(CustomMappingConfiguration.class) .withConfiguration(AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class, BatchAutoConfiguration.class, SingleStepJobAutoConfiguration.class, KafkaItemReaderAutoConfiguration.class, DataSourceAutoConfiguration.class)) .withPropertyValues("spring.batch.job.jobName=job", "spring.batch.job.stepName=step1", "spring.batch.job.chunkSize=5", "spring.kafka.consumer.bootstrap-servers=" + embeddedKafkaBroker.getBrokersAsString(), "spring.kafka.consumer.group-id=1", "spring.batch.job.kafkaitemreader.name=kafkaItemReader", "spring.batch.job.kafkaitemreader.topic=" + topicName, "spring.kafka.consumer.value-deserializer=" + JsonDeserializer.class.getName()); Date startTime = new Date(); applicationContextRunner.run((context) -> { JobOperator jobOperator = context.getBean(JobOperator.class); Job job = context.getBean(Job.class); ListItemWriter itemWriter = context.getBean(ListItemWriter.class); JobExecution jobExecution = jobOperator.start(job, new JobParameters()); JobRepository jobRepository = context.getBean(JobRepository.class); while (jobRepository.getJobExecution(jobExecution.getId()).isRunning()) { Thread.sleep(1000); } Date endTime = new Date(); long seconds = (endTime.getTime() - startTime.getTime()) / 1000; assertThat(seconds).isGreaterThanOrEqualTo(30); basicValidation(itemWriter); }); } private void basicValidation(ListItemWriter itemWriter) { List> writtenItems = itemWriter.getWrittenItems(); assertThat(writtenItems.size()).isEqualTo(4); List results = new ArrayList<>(); for 
(int i = 0; i < 4; i++) { results.add(writtenItems.get(i).get("first_name")); } assertThat(results).contains("jane", "john", "susan", "jim"); } private void populateSingleTopic(String topic) { Map configps = new HashMap<>(KafkaTestUtils.producerProps(embeddedKafkaBroker)); Producer producer = new DefaultKafkaProducerFactory<>(configps, new StringSerializer(), new JsonSerializer<>()) .createProducer(); Map testMap = new HashMap<>(); testMap.put("first_name", "jane"); producer.send(new ProducerRecord<>(topic, "my-aggregate-id", testMap)); testMap = new HashMap<>(); testMap.put("first_name", "john"); producer.send(new ProducerRecord<>(topic, "my-aggregate-id", testMap)); testMap = new HashMap<>(); testMap.put("first_name", "susan"); producer.send(new ProducerRecord<>(topic, "my-aggregate-id", testMap)); testMap = new HashMap<>(); testMap.put("first_name", "jim"); producer.send(new ProducerRecord<>(topic, "my-aggregate-id", testMap)); producer.flush(); producer.close(); } @Configuration public static class CustomMappingConfiguration { @Bean public PlatformTransactionManager platformTransactionManager() { return new ResourcelessTransactionManager(); } @Bean public ListItemWriter> itemWriter() { return new ListItemWriter<>(); } @Bean public SslBundles sslBundles() { return new SslBundles() { @Override public SslBundle getBundle(String name) throws NoSuchSslBundleException { return null; } @Override public void addBundleUpdateHandler(String name, Consumer updateHandler) throws NoSuchSslBundleException { } @Override public void addBundleRegisterHandler(BiConsumer registerHandler) { } @Override public List getBundleNames() { return List.of(); } }; } } } ================================================ FILE: spring-cloud-starter-single-step-batch-job/src/test/java/org/springframework/cloud/task/batch/autoconfigure/kafka/KafkaItemWriterTests.java ================================================ /* * Copyright 2020-present the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.batch.autoconfigure.kafka; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.function.BiConsumer; import org.apache.kafka.clients.consumer.Consumer; import org.apache.kafka.clients.consumer.ConsumerRecords; import org.apache.kafka.common.serialization.StringDeserializer; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; import org.springframework.batch.core.job.Job; import org.springframework.batch.core.job.JobExecution; import org.springframework.batch.core.job.parameters.JobParameters; import org.springframework.batch.core.launch.JobOperator; import org.springframework.batch.core.repository.JobRepository; import org.springframework.batch.infrastructure.item.support.ListItemReader; import org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager; import org.springframework.boot.autoconfigure.AutoConfigurations; import org.springframework.boot.autoconfigure.context.PropertyPlaceholderAutoConfiguration; import org.springframework.boot.batch.autoconfigure.BatchAutoConfiguration; import org.springframework.boot.jdbc.autoconfigure.DataSourceAutoConfiguration; import org.springframework.boot.ssl.NoSuchSslBundleException; import org.springframework.boot.ssl.SslBundle; import org.springframework.boot.ssl.SslBundles; import 
org.springframework.boot.test.context.runner.ApplicationContextRunner; import org.springframework.cloud.task.batch.autoconfigure.SingleStepJobAutoConfiguration; import org.springframework.context.ApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.kafka.core.DefaultKafkaConsumerFactory; import org.springframework.kafka.support.serializer.JsonDeserializer; import org.springframework.kafka.test.EmbeddedKafkaBroker; import org.springframework.kafka.test.context.EmbeddedKafka; import org.springframework.kafka.test.utils.KafkaTestUtils; import org.springframework.transaction.PlatformTransactionManager; import static java.util.Collections.singleton; import static org.assertj.core.api.Assertions.assertThat; @EmbeddedKafka(partitions = 1, topics = { "topic1" }) public class KafkaItemWriterTests { private static EmbeddedKafkaBroker embeddedKafkaBroker; @BeforeAll public static void setupTest(EmbeddedKafkaBroker embeddedKafka) { embeddedKafkaBroker = embeddedKafka; embeddedKafka.addTopics("topic2"); } @Test public void testBaseKafkaItemWriter() { final String topicName = "topic1"; ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() .withUserConfiguration(CustomMappingConfiguration.class) .withConfiguration(AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class, BatchAutoConfiguration.class, SingleStepJobAutoConfiguration.class, KafkaItemWriterAutoConfiguration.class, DataSourceAutoConfiguration.class)) .withPropertyValues("spring.batch.job.jobName=job", "spring.batch.job.stepName=step1", "spring.batch.job.chunkSize=5", "spring.kafka.producer.bootstrap-servers=" + embeddedKafkaBroker.getBrokersAsString(), "spring.kafka.producer.keySerializer=org.springframework.kafka.support.serializer.JsonSerializer", "spring.batch.job.kafkaitemwriter.topic=" + topicName); applicationContextRunner.run((context) -> { 
waitForTopicPopulation(context); validateResults(topicName); }); } private void validateResults(String topicName) { Map configs = new HashMap<>(KafkaTestUtils.consumerProps("1", "false", embeddedKafkaBroker)); Consumer consumer = new DefaultKafkaConsumerFactory<>(configs, new StringDeserializer(), new JsonDeserializer<>()) .createConsumer(); consumer.subscribe(singleton(topicName)); ConsumerRecords consumerRecords = KafkaTestUtils.getRecords(consumer); assertThat(consumerRecords.count()).isEqualTo(5); List> result = new ArrayList<>(); consumerRecords.forEach(cs -> { result.add((Map) cs.value()); }); List firstNames = new ArrayList<>(); result.forEach(s -> firstNames.add((String) s.get("first_name"))); assertThat(firstNames.size()).isEqualTo(5); assertThat(firstNames).contains("Jane"); assertThat(firstNames).contains("John"); assertThat(firstNames).contains("Liz"); assertThat(firstNames).contains("Cameron"); assertThat(firstNames).contains("Judy"); } private void waitForTopicPopulation(ApplicationContext context) throws Exception { JobOperator jobOperator = context.getBean(JobOperator.class); Job job = context.getBean(Job.class); JobExecution jobExecution = jobOperator.start(job, new JobParameters()); JobRepository jobRepository = context.getBean(JobRepository.class); while (jobRepository.getJobExecution(jobExecution.getId()).isRunning()) { Thread.sleep(1000); } } @Configuration public static class CustomMappingConfiguration { @Bean public PlatformTransactionManager platformTransactionManager() { return new ResourcelessTransactionManager(); } @Bean public ListItemReader> itemReader() { List> list = new ArrayList<>(5); addNameToReaderList(list, "Jane"); addNameToReaderList(list, "John"); addNameToReaderList(list, "Liz"); addNameToReaderList(list, "Cameron"); addNameToReaderList(list, "Judy"); return new ListItemReader<>(list); } private void addNameToReaderList(List> itemReaderList, String value) { Map prepMap = new HashMap<>(); prepMap.put("first_name", value); 
itemReaderList.add(prepMap); } @Bean public SslBundles sslBundles() { return new SslBundles() { @Override public SslBundle getBundle(String name) throws NoSuchSslBundleException { return null; } @Override public void addBundleUpdateHandler(String name, java.util.function.Consumer updateHandler) throws NoSuchSslBundleException { } @Override public void addBundleRegisterHandler(BiConsumer registerHandler) { } @Override public List getBundleNames() { return List.of(); } }; } } } ================================================ FILE: spring-cloud-starter-single-step-batch-job/src/test/java/org/springframework/cloud/task/batch/autoconfigure/rabbit/AmqpItemReaderAutoConfigurationTests.java ================================================ /* * Copyright 2020-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.springframework.cloud.task.batch.autoconfigure.rabbit; import java.util.HashMap; import java.util.List; import java.util.Map; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; import org.mockito.Mockito; import org.testcontainers.containers.GenericContainer; import org.testcontainers.rabbitmq.RabbitMQContainer; import org.springframework.amqp.core.AmqpAdmin; import org.springframework.amqp.core.AmqpTemplate; import org.springframework.amqp.core.Queue; import org.springframework.amqp.rabbit.connection.CachingConnectionFactory; import org.springframework.amqp.rabbit.connection.ConnectionFactory; import org.springframework.amqp.rabbit.core.RabbitAdmin; import org.springframework.amqp.rabbit.core.RabbitTemplate; import org.springframework.amqp.support.converter.JacksonJsonMessageConverter; import org.springframework.batch.core.job.Job; import org.springframework.batch.core.job.JobExecution; import org.springframework.batch.core.job.parameters.JobParameters; import org.springframework.batch.core.launch.JobOperator; import org.springframework.batch.core.repository.JobRepository; import org.springframework.batch.infrastructure.item.support.ListItemWriter; import org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager; import org.springframework.boot.amqp.autoconfigure.RabbitAutoConfiguration; import org.springframework.boot.autoconfigure.AutoConfigurations; import org.springframework.boot.autoconfigure.context.PropertyPlaceholderAutoConfiguration; import org.springframework.boot.batch.autoconfigure.BatchAutoConfiguration; import org.springframework.boot.jdbc.autoconfigure.DataSourceAutoConfiguration; import org.springframework.boot.test.context.assertj.AssertableApplicationContext; import org.springframework.boot.test.context.runner.ApplicationContextRunner; import 
org.springframework.cloud.task.batch.autoconfigure.SingleStepJobAutoConfiguration; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.transaction.PlatformTransactionManager; import static org.assertj.core.api.Assertions.assertThat; @Tag("DockerRequired") public class AmqpItemReaderAutoConfigurationTests { private static int amqpPort; private static String host; private RabbitTemplate template; private ConnectionFactory connectionFactory; static { GenericContainer rabbitmq = new RabbitMQContainer("rabbitmq:3.8.9").withExposedPorts(5672); rabbitmq.start(); final Integer mappedPort = rabbitmq.getMappedPort(5672); host = rabbitmq.getHost(); amqpPort = mappedPort; } @BeforeEach void setupTest() { this.connectionFactory = new CachingConnectionFactory(host, amqpPort); this.template = new RabbitTemplate(this.connectionFactory); this.template.setMessageConverter(new JacksonJsonMessageConverter()); AmqpAdmin admin = new RabbitAdmin(this.connectionFactory); admin.declareQueue(new Queue("foo")); Map testMap = new HashMap<>(); testMap.put("ITEM_NAME", "foo"); this.template.convertAndSend("foo", testMap); testMap = new HashMap<>(); testMap.put("ITEM_NAME", "bar"); this.template.convertAndSend("foo", testMap); testMap = new HashMap<>(); testMap.put("ITEM_NAME", "baz"); this.template.convertAndSend("foo", testMap); } @AfterEach void teardownTest() { AmqpAdmin admin = new RabbitAdmin(this.connectionFactory); admin.deleteQueue("foo"); this.template.destroy(); } @Test void basicTest() { ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() .withUserConfiguration(BaseConfiguration.class) .withConfiguration( AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class, BatchAutoConfiguration.class, SingleStepJobAutoConfiguration.class, AmqpItemReaderAutoConfiguration.class, RabbitAutoConfiguration.class, DataSourceAutoConfiguration.class)) 
.withPropertyValues("spring.batch.job.jobName=integrationJob", "spring.batch.job.stepName=step1", "spring.batch.job.chunkSize=5", "spring.batch.job.amqpitemreader.enabled=true", "spring.rabbitmq.template.default-receive-queue=foo", "spring.rabbitmq.host=" + host, "spring.rabbitmq.port=" + amqpPort); applicationContextRunner.run((context) -> { JobExecution jobExecution = runJob(context); JobRepository jobRepository = context.getBean(JobRepository.class); while (jobRepository.getJobExecution(jobExecution.getId()).isRunning()) { Thread.sleep(1000); } validateBasicTest(context.getBean(ListItemWriter.class).getWrittenItems()); }); } @Test void basicTestWithItemType() { ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() .withUserConfiguration(ItemTypeConfiguration.class) .withConfiguration( AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class, BatchAutoConfiguration.class, SingleStepJobAutoConfiguration.class, AmqpItemReaderAutoConfiguration.class, RabbitAutoConfiguration.class, DataSourceAutoConfiguration.class)) .withPropertyValues("spring.batch.job.jobName=integrationJob", "spring.batch.job.stepName=step1", "spring.batch.job.chunkSize=5", "spring.batch.job.amqpitemreader.enabled=true", "spring.rabbitmq.template.default-receive-queue=foo", "spring.rabbitmq.host=" + host, "spring.rabbitmq.port=" + amqpPort); applicationContextRunner.run((context) -> { JobExecution jobExecution = runJob(context); JobRepository jobRepository = context.getBean(JobRepository.class); while (jobRepository.getJobExecution(jobExecution.getId()).isRunning()) { Thread.sleep(1000); } validateBasicTest(context.getBean(ListItemWriter.class).getWrittenItems()); }); } @Test void useAmqpTemplateTest() { ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() .withUserConfiguration(MockTemplateConfiguration.class) .withConfiguration(AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class, BatchAutoConfiguration.class, 
SingleStepJobAutoConfiguration.class, AmqpItemReaderAutoConfiguration.class, DataSourceAutoConfiguration.class)) .withPropertyValues("spring.batch.job.jobName=integrationJob", "spring.batch.job.stepName=step1", "spring.batch.job.chunkSize=5", "spring.batch.job.amqpitemreader.enabled=true", "spring.rabbitmq.host=" + host, "spring.rabbitmq.port=" + amqpPort); applicationContextRunner.run((context) -> { runJob(context); AmqpTemplate amqpTemplate = context.getBean(AmqpTemplate.class); Mockito.verify(amqpTemplate, Mockito.times(1)).receiveAndConvert(); }); } private JobExecution runJob(AssertableApplicationContext context) throws Exception { JobOperator jobOperator = context.getBean(JobOperator.class); Job job = context.getBean(Job.class); return jobOperator.start(job, new JobParameters()); } private void validateBasicTest(List> items) { assertThat(items.size()).isEqualTo(3); assertThat(items.get(0).get("ITEM_NAME")).isEqualTo("foo"); assertThat(items.get(1).get("ITEM_NAME")).isEqualTo("bar"); assertThat(items.get(2).get("ITEM_NAME")).isEqualTo("baz"); } public static class MockTemplateConfiguration extends BaseConfiguration { @Bean AmqpTemplate amqpTemplateBean() { return Mockito.mock(AmqpTemplate.class); }; } public static class ItemTypeConfiguration extends BaseConfiguration { @Bean Class itemTypeClass() { return Map.class; } } @Configuration public static class BaseConfiguration { @Bean public PlatformTransactionManager platformTransactionManager() { return new ResourcelessTransactionManager(); } @Bean public ListItemWriter> itemWriter() { return new ListItemWriter<>(); } } } ================================================ FILE: spring-cloud-starter-single-step-batch-job/src/test/java/org/springframework/cloud/task/batch/autoconfigure/rabbit/AmqpItemWriterAutoConfigurationTests.java ================================================ /* * Copyright 2020-present the original author or authors. 
*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.cloud.task.batch.autoconfigure.rabbit;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
import org.testcontainers.containers.GenericContainer;
import org.testcontainers.rabbitmq.RabbitMQContainer;

import org.springframework.amqp.core.AmqpAdmin;
import org.springframework.amqp.core.AmqpTemplate;
import org.springframework.amqp.core.Binding;
import org.springframework.amqp.core.Queue;
import org.springframework.amqp.core.TopicExchange;
import org.springframework.amqp.rabbit.connection.CachingConnectionFactory;
import org.springframework.amqp.rabbit.connection.ConnectionFactory;
import org.springframework.amqp.rabbit.core.RabbitAdmin;
import org.springframework.amqp.rabbit.core.RabbitTemplate;
import org.springframework.amqp.support.converter.JacksonJsonMessageConverter;
import org.springframework.batch.core.job.Job;
import org.springframework.batch.core.job.JobExecution;
import org.springframework.batch.core.job.parameters.JobParameters;
import org.springframework.batch.core.launch.JobOperator;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.infrastructure.item.ItemReader;
import org.springframework.batch.infrastructure.item.support.ListItemReader;
import org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager;
import org.springframework.boot.amqp.autoconfigure.RabbitAutoConfiguration;
import org.springframework.boot.autoconfigure.AutoConfigurations;
import org.springframework.boot.autoconfigure.context.PropertyPlaceholderAutoConfiguration;
import org.springframework.boot.batch.autoconfigure.BatchAutoConfiguration;
import org.springframework.boot.jdbc.autoconfigure.DataSourceAutoConfiguration;
import org.springframework.boot.test.context.assertj.AssertableApplicationContext;
import org.springframework.boot.test.context.runner.ApplicationContextRunner;
import org.springframework.cloud.task.batch.autoconfigure.SingleStepJobAutoConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.core.RowMapper;
import org.springframework.transaction.PlatformTransactionManager;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Verifies that {@link AmqpItemWriterAutoConfiguration} builds a single-step job whose
 * writer publishes items to a RabbitMQ exchange, backed by a Testcontainers broker.
 */
@Tag("DockerRequired")
public class AmqpItemWriterAutoConfigurationTests {

	private final static String QUEUE_NAME = "foo";

	private final static String EXCHANGE_NAME = "fooexchange";

	private static int amqpPort;

	private static String host;

	private static List<Map<String, Object>> sampleData;

	private RabbitTemplate template;

	private ConnectionFactory connectionFactory;

	private String[] configurations;

	static {
		// One container for the whole test class; started eagerly before any test runs.
		GenericContainer rabbitmq = new RabbitMQContainer("rabbitmq:3.8.9").withExposedPorts(5672);
		rabbitmq.start();
		final Integer mappedPort = rabbitmq.getMappedPort(5672);
		host = rabbitmq.getHost();
		amqpPort = mappedPort;
		sampleData = new ArrayList<>(5);
		addNameToReaderList(sampleData, "Jane");
		addNameToReaderList(sampleData, "John");
		addNameToReaderList(sampleData, "Liz");
		addNameToReaderList(sampleData, "Cameron");
		addNameToReaderList(sampleData, "Judy");
	}

	private static void addNameToReaderList(List<Map<String, Object>> itemReaderList, String value) {
		Map<String, Object> prepMap = new HashMap<>();
		prepMap.put("first_name", value);
		itemReaderList.add(prepMap);
	}

	/**
	 * Declares a queue bound to the test exchange with a catch-all routing key, and
	 * prepares the common property set used by both tests.
	 */
	@BeforeEach
	void setupTest() {
		this.connectionFactory = new CachingConnectionFactory(host, amqpPort);
		this.template = new RabbitTemplate(this.connectionFactory);
		this.template.setMessageConverter(new JacksonJsonMessageConverter());
		AmqpAdmin admin = new RabbitAdmin(this.connectionFactory);
		admin.declareQueue(new Queue(QUEUE_NAME));
		admin.declareExchange(new TopicExchange(EXCHANGE_NAME));
		admin.declareBinding(new Binding(QUEUE_NAME, Binding.DestinationType.QUEUE, EXCHANGE_NAME, "#", null));
		this.configurations = new String[] { "spring.batch.job.jobName=integrationJob",
				"spring.batch.job.stepName=step1", "spring.batch.job.chunkSize=5",
				"spring.rabbitmq.template.exchange=" + EXCHANGE_NAME, "spring.rabbitmq.host=" + host,
				"spring.batch.job.amqpitemwriter.enabled=true", "spring.rabbitmq.port=" + amqpPort };
	}

	@AfterEach
	void teardownTest() {
		AmqpAdmin admin = new RabbitAdmin(this.connectionFactory);
		admin.deleteQueue(QUEUE_NAME);
		this.template.destroy();
	}

	/**
	 * Runs the job and asserts every sample entry arrives on the bound queue in order.
	 */
	@Test
	void basicTest() {
		ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner()
			.withUserConfiguration(BaseConfiguration.class)
			.withConfiguration(AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class,
					BatchAutoConfiguration.class, SingleStepJobAutoConfiguration.class,
					AmqpItemWriterAutoConfiguration.class, RabbitAutoConfiguration.class,
					DataSourceAutoConfiguration.class))
			.withPropertyValues(this.configurations);
		applicationContextRunner.run((context) -> {
			JobExecution jobExecution = runJob(context);
			JobRepository jobRepository = context.getBean(JobRepository.class);
			// Poll until the asynchronously launched job completes.
			while (jobRepository.getJobExecution(jobExecution.getId()).isRunning()) {
				Thread.sleep(1000);
			}
			for (Map<String, Object> sampleEntry : sampleData) {
				Map<String, Object> map = (Map<String, Object>) template.receiveAndConvert(QUEUE_NAME);
				assertThat(map.get("first_name")).isEqualTo(sampleEntry.get("first_name"));
			}
		});
	}

	/**
	 * Verifies that a user-supplied {@link AmqpTemplate} bean is used by the writer:
	 * one send per sample item.
	 */
	@Test
	void useAmqpTemplateTest() {
		ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner()
			.withUserConfiguration(MockConfiguration.class)
			.withConfiguration(AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class,
					BatchAutoConfiguration.class, SingleStepJobAutoConfiguration.class,
					AmqpItemWriterAutoConfiguration.class, DataSourceAutoConfiguration.class))
			.withPropertyValues(this.configurations);
		applicationContextRunner.run((context) -> {
			runJob(context);
			AmqpTemplate amqpTemplate = context.getBean(AmqpTemplate.class);
			Mockito.verify(amqpTemplate, Mockito.times(5)).convertAndSend(Mockito.any());
		});
	}

	private JobExecution runJob(AssertableApplicationContext context) throws Exception {
		JobOperator jobOperator = context.getBean(JobOperator.class);
		Job job = context.getBean(Job.class);
		return jobOperator.start(job, new JobParameters());
	}

	@Configuration
	public static class BaseConfiguration extends ItemWriterConfiguration {

	}

	@Configuration
	public static class MockConfiguration extends ItemWriterConfiguration {

		@Bean
		AmqpTemplate amqpTemplateBean() {
			return Mockito.mock(AmqpTemplate.class);
		}

	}

	public static class ItemWriterConfiguration {

		@Bean
		public PlatformTransactionManager platformTransactionManager() {
			return new ResourcelessTransactionManager();
		}

		@Bean
		public RowMapper<Map<String, Object>> rowMapper() {
			return (rs, rowNum) -> {
				Map<String, Object> item = new HashMap<>();
				item.put("item", rs.getString("item_name"));
				return item;
			};
		}

		// NOTE(review): the bean is named itemWriter but supplies the job's reader
		// input; the name is kept as-is because the auto-configuration resolves by name.
		@Bean
		public ItemReader<Map<String, Object>> itemWriter() {
			return new ListItemReader<>(sampleData);
		}

	}

}



================================================
FILE: spring-cloud-starter-single-step-batch-job/src/test/resources/logback-test.xml
================================================


================================================
FILE: spring-cloud-starter-single-step-batch-job/src/test/resources/schema-h2.sql
================================================
CREATE TABLE IF NOT EXISTS item (
	item_name varchar(55)
);
================================================ FILE: spring-cloud-starter-single-step-batch-job/src/test/resources/test.txt ================================================ 1 2 3 4 5 six # This should be ignored 7 8 9 1011twelve $ So should this 1314151617eighteen 1920212223twenty four 1526272829thirty 3132333435thirty six ================================================ FILE: spring-cloud-starter-single-step-batch-job/src/test/resources/testUTF8.csv ================================================ 1@2@3@4@5@six # This should be ignored 7@8@9@10@11@twelve $ So should this 13@14@15@16@17@eighteen 19@20@21@22@23@%twenty four% 15@26@27@28@29@thirty 31@32@33@34@35@thirty six 37@38@39@40@41@forty two 43@44@45@46@47@forty eight 49@50@51@52@53@fifty four 55@56@57@58@59@sixty ================================================ FILE: spring-cloud-starter-task/pom.xml ================================================ 4.0.0 org.springframework.cloud spring-cloud-task-parent 5.0.2-SNAPSHOT spring-cloud-starter-task jar Spring Cloud Task Starter Spring Boot starter for Spring Cloud Task org.springframework.boot spring-boot-starter org.springframework.cloud spring-cloud-task-core org.springframework.cloud spring-cloud-task-batch org.springframework.cloud spring-cloud-task-stream org.springframework.boot spring-boot-starter-jdbc ================================================ FILE: spring-cloud-task-batch/pom.xml ================================================ 4.0.0 org.springframework.cloud spring-cloud-task-parent 5.0.2-SNAPSHOT spring-cloud-task-batch jar Spring Cloud Task Batch Module for use when combining Spring Cloud Task with Spring Batch org.springframework.cloud spring-cloud-task-core org.springframework.boot spring-boot-batch org.springframework.batch spring-batch-integration true org.springframework spring-test test org.springframework.boot spring-boot-test test org.springframework.boot spring-boot-starter-jdbc test com.h2database h2 test org.springframework.boot 
spring-boot-configuration-processor true ${spring-boot.version} org.assertj assertj-core test org.junit.jupiter junit-jupiter-api test org.springframework.boot spring-boot-autoconfigure-processor true org.springframework.boot spring-boot-starter-batch test org.mockito mockito-junit-jupiter test org.junit.jupiter junit-jupiter test ================================================ FILE: spring-cloud-task-batch/src/main/java/org/springframework/cloud/task/batch/configuration/JobLaunchCondition.java ================================================ /* * Copyright 2016-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.batch.configuration; import org.springframework.boot.autoconfigure.condition.AllNestedConditions; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; /** * Evaluates if the correct conditions have been met to create a TaskJobLauncher. 
* * @author Glenn Renfro * @since 2.2.0 */ public class JobLaunchCondition extends AllNestedConditions { public JobLaunchCondition() { super(ConfigurationPhase.PARSE_CONFIGURATION); } @ConditionalOnProperty(name = "spring.cloud.task.batch.fail-on-job-failure", havingValue = "true") static class FailOnJobFailureCondition { } @ConditionalOnProperty(prefix = "spring.batch.job", name = "enabled", havingValue = "true", matchIfMissing = true) static class SpringBatchJobCondition { } } ================================================ FILE: spring-cloud-task-batch/src/main/java/org/springframework/cloud/task/batch/configuration/TaskBatchAutoConfiguration.java ================================================ /* * Copyright 2016-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.springframework.cloud.task.batch.configuration; import org.springframework.batch.core.job.Job; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.autoconfigure.AutoConfiguration; import org.springframework.boot.autoconfigure.condition.ConditionalOnBean; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.cloud.task.batch.listener.TaskBatchExecutionListener; import org.springframework.cloud.task.configuration.TaskConfigurer; import org.springframework.cloud.task.configuration.TaskProperties; import org.springframework.cloud.task.listener.TaskLifecycleListener; import org.springframework.cloud.task.repository.TaskExplorer; import org.springframework.context.ApplicationContext; import org.springframework.context.annotation.Bean; /** * Provides auto configuration for the {@link TaskBatchExecutionListener}. * * The spring.cloud.task.batch.listener.enable is deprecated, * spring.cloud.task.batch.listener.enabled should be used. * * @author Michael Minella */ @AutoConfiguration @ConditionalOnBean({ Job.class, TaskLifecycleListener.class }) @ConditionalOnProperty(name = { "spring.cloud.task.batch.listener.enable", "spring.cloud.task.batch.listener.enabled" }, havingValue = "true", matchIfMissing = true) public class TaskBatchAutoConfiguration { @Bean @ConditionalOnMissingBean public static TaskBatchExecutionListenerBeanPostProcessor batchTaskExecutionListenerBeanPostProcessor() { return new TaskBatchExecutionListenerBeanPostProcessor(); } /** * Auto configuration for {@link TaskBatchExecutionListener}. 
*/ @AutoConfiguration @ConditionalOnMissingBean(name = "taskBatchExecutionListener") @EnableConfigurationProperties(TaskProperties.class) public static class TaskBatchExecutionListenerAutoconfiguration { @Autowired private ApplicationContext context; @Autowired private TaskProperties taskProperties; @Bean public TaskBatchExecutionListenerFactoryBean taskBatchExecutionListener(TaskExplorer taskExplorer) { TaskConfigurer taskConfigurer = null; if (!this.context.getBeansOfType(TaskConfigurer.class).isEmpty()) { taskConfigurer = this.context.getBean(TaskConfigurer.class); } if (taskConfigurer != null && taskConfigurer.getTaskDataSource() != null) { return new TaskBatchExecutionListenerFactoryBean(taskConfigurer.getTaskDataSource(), taskExplorer, this.taskProperties.getTablePrefix()); } else { return new TaskBatchExecutionListenerFactoryBean(null, taskExplorer, this.taskProperties.getTablePrefix()); } } } } ================================================ FILE: spring-cloud-task-batch/src/main/java/org/springframework/cloud/task/batch/configuration/TaskBatchExecutionListenerBeanPostProcessor.java ================================================ /* * Copyright 2016-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.springframework.cloud.task.batch.configuration; import java.util.ArrayList; import java.util.List; import org.springframework.batch.core.job.AbstractJob; import org.springframework.beans.BeansException; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.config.BeanPostProcessor; import org.springframework.cloud.task.batch.listener.TaskBatchExecutionListener; import org.springframework.context.ApplicationContext; import org.springframework.util.Assert; /** * Injects a configured {@link TaskBatchExecutionListener} into any batch jobs (beans * assignable to {@link AbstractJob}) that are executed within the scope of a task. The * context this is used within is expected to have only one bean of type * {@link TaskBatchExecutionListener}. * * @author Michael Minella */ public class TaskBatchExecutionListenerBeanPostProcessor implements BeanPostProcessor { @Autowired private ApplicationContext applicationContext; private List jobNames = new ArrayList<>(); @Override public Object postProcessBeforeInitialization(Object bean, String beanName) throws BeansException { return bean; } @Override public Object postProcessAfterInitialization(Object bean, String beanName) throws BeansException { if (this.jobNames.size() > 0 && !this.jobNames.contains(beanName)) { return bean; } int length = this.applicationContext.getBeanNamesForType(TaskBatchExecutionListener.class).length; if (bean instanceof AbstractJob) { if (length != 1) { throw new IllegalStateException("The application context is required to " + "have exactly 1 instance of the TaskBatchExecutionListener but has " + length); } ((AbstractJob) bean) .registerJobExecutionListener(this.applicationContext.getBean(TaskBatchExecutionListener.class)); } return bean; } public void setJobNames(List jobNames) { Assert.notNull(jobNames, "A list is required"); this.jobNames = jobNames; } } ================================================ FILE: 
spring-cloud-task-batch/src/main/java/org/springframework/cloud/task/batch/configuration/TaskBatchExecutionListenerFactoryBean.java ================================================ /* * Copyright 2016-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.batch.configuration; import java.lang.reflect.Field; import javax.sql.DataSource; import org.springframework.aop.framework.Advised; import org.springframework.aop.support.AopUtils; import org.springframework.beans.factory.FactoryBean; import org.springframework.cloud.task.batch.listener.TaskBatchExecutionListener; import org.springframework.cloud.task.batch.listener.support.JdbcTaskBatchDao; import org.springframework.cloud.task.batch.listener.support.MapTaskBatchDao; import org.springframework.cloud.task.configuration.TaskProperties; import org.springframework.cloud.task.repository.TaskExplorer; import org.springframework.cloud.task.repository.dao.MapTaskExecutionDao; import org.springframework.cloud.task.repository.support.SimpleTaskExplorer; import org.springframework.util.Assert; import org.springframework.util.ReflectionUtils; /** * {@link FactoryBean} for a {@link TaskBatchExecutionListener}. Provides a jdbc based * listener if there is a {@link DataSource} available. Otherwise, builds a listener that * uses the map based implementation. 
* * @author Michael Minella */ public class TaskBatchExecutionListenerFactoryBean implements FactoryBean { private TaskBatchExecutionListener listener; private DataSource dataSource; private TaskExplorer taskExplorer; private String tablePrefix = TaskProperties.DEFAULT_TABLE_PREFIX; /** * Initializes the TaskBatchExecutionListenerFactoryBean and defaults the tablePrefix * to {@link TaskProperties#DEFAULT_TABLE_PREFIX}. * @param dataSource the dataSource to use for the TaskBatchExecutionListener. * @param taskExplorer the taskExplorer to use for the TaskBatchExecutionListener. */ public TaskBatchExecutionListenerFactoryBean(DataSource dataSource, TaskExplorer taskExplorer) { this.dataSource = dataSource; this.taskExplorer = taskExplorer; } /** * Initializes the TaskBatchExecutionListenerFactoryBean. * @param dataSource the dataSource to use for the TaskBatchExecutionListener. * @param taskExplorer the taskExplorer to use for the TaskBatchExecutionListener. * @param tablePrefix the prefix for the task tables accessed by the * TaskBatchExecutionListener. 
*/ public TaskBatchExecutionListenerFactoryBean(DataSource dataSource, TaskExplorer taskExplorer, String tablePrefix) { this(dataSource, taskExplorer); Assert.hasText(tablePrefix, "tablePrefix must not be null nor empty."); this.tablePrefix = tablePrefix; } @Override public TaskBatchExecutionListener getObject() throws Exception { if (this.listener != null) { return this.listener; } if (this.dataSource == null) { this.listener = new TaskBatchExecutionListener(getMapTaskBatchDao()); } else { this.listener = new TaskBatchExecutionListener(new JdbcTaskBatchDao(this.dataSource, this.tablePrefix)); } return this.listener; } @Override public Class getObjectType() { return TaskBatchExecutionListener.class; } @Override public boolean isSingleton() { return true; } private MapTaskBatchDao getMapTaskBatchDao() throws Exception { Field taskExecutionDaoField = ReflectionUtils.findField(SimpleTaskExplorer.class, "taskExecutionDao"); taskExecutionDaoField.setAccessible(true); MapTaskExecutionDao taskExecutionDao; if (AopUtils.isJdkDynamicProxy(this.taskExplorer)) { SimpleTaskExplorer dereferencedTaskRepository = (SimpleTaskExplorer) ((Advised) this.taskExplorer) .getTargetSource() .getTarget(); taskExecutionDao = (MapTaskExecutionDao) ReflectionUtils.getField(taskExecutionDaoField, dereferencedTaskRepository); } else { taskExecutionDao = (MapTaskExecutionDao) ReflectionUtils.getField(taskExecutionDaoField, this.taskExplorer); } return new MapTaskBatchDao(taskExecutionDao.getBatchJobAssociations()); } } ================================================ FILE: spring-cloud-task-batch/src/main/java/org/springframework/cloud/task/batch/configuration/TaskBatchProperties.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.cloud.task.batch.configuration;

import org.springframework.boot.context.properties.ConfigurationProperties;

/**
 * Establish properties to be used for how Tasks work with Spring Batch.
 *
 * @author Glenn Renfro
 * @author Michael Minella
 * @since 2.0.0
 */
@ConfigurationProperties(prefix = "spring.cloud.task.batch")
public class TaskBatchProperties {

	private static final long DEFAULT_POLL_INTERVAL = 5000L;

	/**
	 * Comma-separated list of job names to execute on startup (for instance,
	 * `job1,job2`). By default, all Jobs found in the context are executed.
	 * @deprecated use spring.batch.job.name instead of spring.cloud.task.batch.jobNames.
	 */
	private String jobNames = "";

	/**
	 * The order for the {@code ApplicationRunner} used to run batch jobs when
	 * {@code spring.cloud.task.batch.fail-on-job-failure=true}. Defaults to 0 (same as
	 * the
	 * {@link org.springframework.boot.batch.autoconfigure.JobLauncherApplicationRunner}).
	 */
	private int applicationRunnerOrder = 0;

	/**
	 * Fixed delay in milliseconds that Spring Cloud Task will wait when checking if
	 * {@link org.springframework.batch.core.JobExecution}s have completed, when
	 * spring.cloud.task.batch.failOnJobFailure is set to true. Defaults to 5000.
	 */
	private long failOnJobFailurePollInterval = DEFAULT_POLL_INTERVAL;

	public String getJobNames() {
		return this.jobNames;
	}

	public void setJobNames(String jobNames) {
		this.jobNames = jobNames;
	}

	/**
	 * @return the runner order.
	 * @deprecated in favor of {@link #getApplicationRunnerOrder()}; both accessors read
	 * the same underlying {@code applicationRunnerOrder} value.
	 */
	@Deprecated
	public int getCommandLineRunnerOrder() {
		return this.applicationRunnerOrder;
	}

	/**
	 * @param commandLineRunnerOrder the runner order.
	 * @deprecated in favor of {@link #setApplicationRunnerOrder(int)}; both mutators
	 * write the same underlying {@code applicationRunnerOrder} value.
	 */
	@Deprecated
	public void setCommandLineRunnerOrder(int commandLineRunnerOrder) {
		this.applicationRunnerOrder = commandLineRunnerOrder;
	}

	public int getApplicationRunnerOrder() {
		return this.applicationRunnerOrder;
	}

	public void setApplicationRunnerOrder(int applicationRunnerOrder) {
		this.applicationRunnerOrder = applicationRunnerOrder;
	}

	public long getFailOnJobFailurePollInterval() {
		return this.failOnJobFailurePollInterval;
	}

	public void setFailOnJobFailurePollInterval(long failOnJobFailurePollInterval) {
		this.failOnJobFailurePollInterval = failOnJobFailurePollInterval;
	}

}

================================================
FILE: spring-cloud-task-batch/src/main/java/org/springframework/cloud/task/batch/configuration/TaskJobLauncherApplicationRunnerFactoryBean.java
================================================
/*
 * Copyright 2020-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*/ package org.springframework.cloud.task.batch.configuration; import java.util.List; import org.springframework.batch.core.configuration.JobRegistry; import org.springframework.batch.core.job.Job; import org.springframework.batch.core.launch.JobOperator; import org.springframework.batch.core.repository.JobRepository; import org.springframework.beans.factory.FactoryBean; import org.springframework.boot.batch.autoconfigure.BatchProperties; import org.springframework.cloud.task.batch.handler.TaskJobLauncherApplicationRunner; import org.springframework.context.ApplicationEventPublisher; import org.springframework.context.ApplicationEventPublisherAware; import org.springframework.util.Assert; import org.springframework.util.StringUtils; /** * Factory bean for creating an instance of {@link TaskJobLauncherApplicationRunner}. * * @author Glenn Renfro * @since 2.3.0 */ public class TaskJobLauncherApplicationRunnerFactoryBean implements FactoryBean, ApplicationEventPublisherAware { private final JobOperator jobOperator; private final List jobs; private String jobName; private final JobRegistry jobRegistry; private Integer order; private final TaskBatchProperties taskBatchProperties; private final JobRepository jobRepository; private ApplicationEventPublisher applicationEventPublisher; public TaskJobLauncherApplicationRunnerFactoryBean(JobOperator jobOperator, List jobs, TaskBatchProperties taskBatchProperties, JobRegistry jobRegistry, JobRepository jobRepository, BatchProperties batchProperties) { Assert.notNull(taskBatchProperties, "taskBatchProperties must not be null"); Assert.notNull(batchProperties, "batchProperties must not be null"); Assert.notEmpty(jobs, "jobs must not be null nor empty"); this.jobOperator = jobOperator; this.jobs = jobs; this.jobName = taskBatchProperties.getJobNames(); this.jobRegistry = jobRegistry; this.taskBatchProperties = taskBatchProperties; if (StringUtils.hasText(batchProperties.getJob().getName())) { this.jobName = 
batchProperties.getJob().getName(); } else { this.jobName = taskBatchProperties.getJobNames(); } this.order = taskBatchProperties.getApplicationRunnerOrder(); this.jobRepository = jobRepository; } public void setOrder(int order) { this.order = order; } @Override public TaskJobLauncherApplicationRunner getObject() { TaskJobLauncherApplicationRunner taskJobLauncherApplicationRunner = new TaskJobLauncherApplicationRunner( this.jobOperator, this.jobRepository, this.taskBatchProperties); taskJobLauncherApplicationRunner.setJobs(this.jobs); if (StringUtils.hasText(this.jobName)) { taskJobLauncherApplicationRunner.setJobName(this.jobName); } taskJobLauncherApplicationRunner.setJobRegistry(this.jobRegistry); if (this.order != null) { taskJobLauncherApplicationRunner.setOrder(this.order); } if (this.applicationEventPublisher != null) { taskJobLauncherApplicationRunner.setApplicationEventPublisher(this.applicationEventPublisher); } return taskJobLauncherApplicationRunner; } @Override public Class getObjectType() { return TaskJobLauncherApplicationRunner.class; } @Override public void setApplicationEventPublisher(ApplicationEventPublisher applicationEventPublisher) { this.applicationEventPublisher = applicationEventPublisher; } } ================================================ FILE: spring-cloud-task-batch/src/main/java/org/springframework/cloud/task/batch/configuration/TaskJobLauncherAutoConfiguration.java ================================================ /* * Copyright 2018-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.batch.configuration; import java.util.List; import org.springframework.batch.core.configuration.JobRegistry; import org.springframework.batch.core.job.Job; import org.springframework.batch.core.launch.JobOperator; import org.springframework.batch.core.repository.JobRepository; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.autoconfigure.AutoConfiguration; import org.springframework.boot.autoconfigure.AutoConfigureBefore; import org.springframework.boot.autoconfigure.condition.ConditionalOnClass; import org.springframework.boot.batch.autoconfigure.BatchAutoConfiguration; import org.springframework.boot.batch.autoconfigure.BatchProperties; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Conditional; /** * Provides auto configuration for the * {@link org.springframework.cloud.task.batch.handler.TaskJobLauncherApplicationRunner}. 
* * @author Glenn Renfro */ @AutoConfiguration @Conditional(JobLaunchCondition.class) @EnableConfigurationProperties(TaskBatchProperties.class) @AutoConfigureBefore(BatchAutoConfiguration.class) public class TaskJobLauncherAutoConfiguration { @Autowired private TaskBatchProperties properties; @Bean @ConditionalOnClass(name = "org.springframework.boot.batch.autoconfigure.JobLauncherApplicationRunner") public TaskJobLauncherApplicationRunnerFactoryBean taskJobLauncherApplicationRunner(JobOperator jobLauncher, List jobs, JobRegistry jobRegistry, JobRepository jobRepository, BatchProperties batchProperties) { TaskJobLauncherApplicationRunnerFactoryBean taskJobLauncherApplicationRunnerFactoryBean; taskJobLauncherApplicationRunnerFactoryBean = new TaskJobLauncherApplicationRunnerFactoryBean(jobLauncher, jobs, this.properties, jobRegistry, jobRepository, batchProperties); return taskJobLauncherApplicationRunnerFactoryBean; } } ================================================ FILE: spring-cloud-task-batch/src/main/java/org/springframework/cloud/task/batch/configuration/package-info.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Configuration classes for Spring Cloud Task Batch integration. 
*/ package org.springframework.cloud.task.batch.configuration; ================================================ FILE: spring-cloud-task-batch/src/main/java/org/springframework/cloud/task/batch/handler/TaskJobLauncherApplicationRunner.java ================================================ /* * Copyright 2020-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.batch.handler; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.job.Job; import org.springframework.batch.core.job.JobExecution; import org.springframework.batch.core.job.JobExecutionException; import org.springframework.batch.core.job.JobInstance; import org.springframework.batch.core.job.parameters.InvalidJobParametersException; import org.springframework.batch.core.job.parameters.JobParameter; import org.springframework.batch.core.job.parameters.JobParameters; import org.springframework.batch.core.job.parameters.JobParametersIncrementer; import org.springframework.batch.core.launch.JobExecutionAlreadyRunningException; import org.springframework.batch.core.launch.JobInstanceAlreadyCompleteException; 
import org.springframework.batch.core.launch.JobOperator; import org.springframework.batch.core.launch.JobRestartException; import org.springframework.batch.core.repository.JobRepository; import org.springframework.batch.infrastructure.repeat.RepeatStatus; import org.springframework.batch.infrastructure.repeat.support.RepeatTemplate; import org.springframework.boot.ApplicationRunner; import org.springframework.boot.batch.autoconfigure.JobExecutionEvent; import org.springframework.boot.batch.autoconfigure.JobLauncherApplicationRunner; import org.springframework.cloud.task.batch.configuration.TaskBatchProperties; import org.springframework.cloud.task.listener.TaskException; import org.springframework.context.ApplicationEventPublisher; import org.springframework.core.task.TaskExecutor; import org.springframework.util.Assert; import org.springframework.util.StringUtils; /** * {@link ApplicationRunner} to {@link JobOperator launch} Spring Batch jobs. Runs all * jobs in the surrounding context by default and throws an exception upon the first job * that returns an {@link BatchStatus} of FAILED if a {@link TaskExecutor} in the * {@link JobOperator} is not specified. If a {@link TaskExecutor} is specified in the * {@link JobOperator} then all Jobs are launched and an exception is thrown if one or * more of the jobs has an {@link BatchStatus} of FAILED. TaskJobLauncherApplicationRunner * can also be used to launch a specific job by providing a jobName. The * TaskJobLauncherApplicationRunner takes the place of the * {@link JobLauncherApplicationRunner} when it is in use. 
* * @author Glenn Renfro * @since 2.3.0 */ public class TaskJobLauncherApplicationRunner extends JobLauncherApplicationRunner { private static final Log logger = LogFactory.getLog(TaskJobLauncherApplicationRunner.class); private final JobOperator taskJobOperator; private final JobRepository taskJobRepository; private final List jobExecutionList = new ArrayList<>(); private ApplicationEventPublisher taskApplicationEventPublisher; private final TaskBatchProperties taskBatchProperties; /** * Create a new {@link TaskJobLauncherApplicationRunner}. * @param jobOperator to launch jobs * @param jobRepository to check if a job instance exists with the given parameters * when running a job * @param taskBatchProperties the properties used to configure the * taskBatchProperties. */ public TaskJobLauncherApplicationRunner(JobOperator jobOperator, JobRepository jobRepository, TaskBatchProperties taskBatchProperties) { super(jobOperator); this.taskJobOperator = jobOperator; this.taskJobRepository = jobRepository; this.taskBatchProperties = taskBatchProperties; } @Override public void setApplicationEventPublisher(ApplicationEventPublisher publisher) { super.setApplicationEventPublisher(publisher); this.taskApplicationEventPublisher = publisher; } @Override public void run(String... 
args) throws JobExecutionException { logger.info("Running default command line with: " + Arrays.asList(args)); Properties properties = StringUtils.splitArrayElementsIntoProperties(args, "="); if (properties == null) { properties = new Properties(); } launchJobFromProperties(properties); monitorJobExecutions(); } @Override protected void execute(Job job, JobParameters jobParameters) throws JobExecutionAlreadyRunningException, JobRestartException, JobInstanceAlreadyCompleteException, InvalidJobParametersException { String jobName = job.getName(); JobParameters parameters = jobParameters; boolean jobInstanceExists = this.taskJobRepository.getJobInstance(job.getName(), jobParameters) != null; if (jobInstanceExists) { JobExecution lastJobExecution = this.taskJobRepository.getLastJobExecution(jobName, jobParameters); if (lastJobExecution != null && isStoppedOrFailed(lastJobExecution) && job.isRestartable()) { // Retry a failed or stopped execution with previous parameters JobParameters previousParameters = lastJobExecution.getJobParameters(); /* * remove Non-identifying parameters from the previous execution's * parameters since there is no way to remove them programmatically. If * they are required (or need to be modified) on a restart, they need to * be (re)specified. 
*/ JobParameters previousIdentifyingParameters = removeNonIdentifying(previousParameters); // merge additional parameters with previous ones (overriding those with // the same key) parameters = merge(previousIdentifyingParameters, jobParameters); } } else { JobParametersIncrementer incrementer = job.getJobParametersIncrementer(); if (incrementer != null) { JobParameters nextParameters = getNextJobParameters(job, new HashSet<>(jobParameters.parameters()), this.taskJobRepository); parameters = merge(nextParameters, jobParameters); } } JobExecution execution = this.taskJobOperator.start(job, parameters); if (this.taskApplicationEventPublisher != null) { this.taskApplicationEventPublisher.publishEvent(new JobExecutionEvent(execution)); } this.jobExecutionList.add(execution); if (execution.getStatus().equals(BatchStatus.FAILED)) { throwJobFailedException(Collections.singletonList(execution)); } } private void monitorJobExecutions() { RepeatTemplate template = new RepeatTemplate(); template.iterate(context -> { List failedJobExecutions = new ArrayList<>(); for (JobExecution jobExecution : this.jobExecutionList) { BatchStatus batchStatus = getCurrentBatchStatus(jobExecution); if (batchStatus.isRunning()) { Thread.sleep(this.taskBatchProperties.getFailOnJobFailurePollInterval()); return RepeatStatus.CONTINUABLE; } if (batchStatus.equals(BatchStatus.FAILED)) { failedJobExecutions.add(jobExecution); } } if (failedJobExecutions.size() > 0) { throwJobFailedException(failedJobExecutions); } return RepeatStatus.FINISHED; }); } private BatchStatus getCurrentBatchStatus(JobExecution jobExecution) { if (jobExecution.getStatus().isRunning()) { return this.taskJobRepository.getJobExecution(jobExecution.getId()).getStatus(); } return jobExecution.getStatus(); } private void throwJobFailedException(List failedJobExecutions) { StringBuilder message = new StringBuilder("The following Jobs have failed: \n"); for (JobExecution failedJobExecution : failedJobExecutions) { 
message.append(String.format( "Job %s failed during " + "execution for job instance id %s with jobExecutionId of %s \n", failedJobExecution.getJobInstance().getJobName(), failedJobExecution.getId(), failedJobExecution.getId())); } logger.error(message); throw new TaskException(message.toString()); } private JobParameters removeNonIdentifying(JobParameters parameters) { Set> parameterMap = parameters.parameters(); Set> copy = new HashSet<>(); for (JobParameter parameter : parameterMap) { if (parameter.identifying()) { copy.add(parameter); } } return new JobParameters(copy); } private boolean isStoppedOrFailed(JobExecution execution) { BatchStatus status = execution.getStatus(); return (status == BatchStatus.STOPPED || status == BatchStatus.FAILED); } private JobParameters merge(JobParameters parameters, JobParameters additionals) { Map> merged = new HashMap<>(); // Add base parameters for (JobParameter param : parameters.parameters()) { merged.put(param.name(), param); } // Override with additionals for (JobParameter param : additionals.parameters()) { merged.put(param.name(), param); } return new JobParameters(new HashSet<>(merged.values())); } /** * Initializes the {@link JobParameters} based on the state of the {@link Job}. This * should be called after all parameters have been entered into the builder. All * parameters already set on this builder instance are appended to those retrieved * from the job incrementer, overriding any with the same key (this is the same * behavior as * {@link org.springframework.batch.core.launch.support.CommandLineJobRunner} with the * {@code -next} option and * {@link org.springframework.batch.core.launch.JobOperator#startNextInstance(String)}). * @param job The job for which the {@link JobParameters} are being constructed. * @return a reference to this object. 
* * @since 4.0 */ public JobParameters getNextJobParameters(Job job, Set> parameterMap, JobRepository taskJobRepository) { Assert.notNull(job, "Job must not be null"); Assert.notNull(job.getJobParametersIncrementer(), "No job parameters incrementer found for job=" + job.getName()); String name = job.getName(); JobParameters nextParameters; JobInstance lastInstance = taskJobRepository.getLastJobInstance(name); JobParametersIncrementer incrementer = job.getJobParametersIncrementer(); if (lastInstance == null) { // Start from a completely clean sheet nextParameters = incrementer.getNext(new JobParameters()); } else { JobExecution previousExecution = taskJobRepository.getLastJobExecution(lastInstance); if (previousExecution == null) { // Normally this will not happen - an instance exists with no executions nextParameters = incrementer.getNext(new JobParameters()); } else { nextParameters = incrementer.getNext(previousExecution.getJobParameters()); } } // start with parameters from the incrementer Set> nextParametersMap = new HashSet<>(nextParameters.parameters()); // append new parameters (overriding those with the same key) nextParametersMap.addAll(parameterMap); parameterMap = nextParametersMap; return new JobParameters(parameterMap); } } ================================================ FILE: spring-cloud-task-batch/src/main/java/org/springframework/cloud/task/batch/handler/package-info.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. */ /** * Handler components for Spring Cloud Task Batch integration. */ package org.springframework.cloud.task.batch.handler; ================================================ FILE: spring-cloud-task-batch/src/main/java/org/springframework/cloud/task/batch/listener/TaskBatchDao.java ================================================ /* * Copyright 2016-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.batch.listener; import org.springframework.batch.core.job.JobExecution; import org.springframework.cloud.task.repository.TaskExecution; /** * Maintains the association between a {@link TaskExecution} and a {@link JobExecution} * executed within it. * * @author Michael Minella */ public interface TaskBatchDao { /** * Saves the relationship between a task execution and a job execution. * @param taskExecution task execution * @param jobExecution job execution */ void saveRelationship(TaskExecution taskExecution, JobExecution jobExecution); } ================================================ FILE: spring-cloud-task-batch/src/main/java/org/springframework/cloud/task/batch/listener/TaskBatchExecutionListener.java ================================================ /* * Copyright 2016-present the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.batch.listener; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.springframework.batch.core.job.JobExecution; import org.springframework.batch.core.listener.JobExecutionListener; import org.springframework.cloud.task.listener.TaskExecutionListener; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.core.Ordered; import org.springframework.util.Assert; /** * Responsible for storing the relationship between a Spring Batch job and the Spring * Cloud task it was executed within. * * @author Michael Minella */ public class TaskBatchExecutionListener implements JobExecutionListener, Ordered, TaskExecutionListener { private static final Log logger = LogFactory.getLog(TaskBatchExecutionListener.class); private TaskExecution taskExecution; private final TaskBatchDao taskBatchDao; /** * @param taskBatchDao dao used to persist the relationship. 
Must not be null */ public TaskBatchExecutionListener(TaskBatchDao taskBatchDao) { Assert.notNull(taskBatchDao, "A TaskBatchDao is required"); this.taskBatchDao = taskBatchDao; } @Override public void onTaskStartup(TaskExecution taskExecution) { this.taskExecution = taskExecution; } @Override public void beforeJob(JobExecution jobExecution) { if (this.taskExecution == null) { logger.warn("This job was executed outside the scope of a task but still used the task listener."); } else { logger.info(String.format("The job execution id %s was run within the task execution %s", jobExecution.getId(), this.taskExecution.getExecutionId())); this.taskBatchDao.saveRelationship(this.taskExecution, jobExecution); } } @Override public int getOrder() { return Ordered.HIGHEST_PRECEDENCE; } } ================================================ FILE: spring-cloud-task-batch/src/main/java/org/springframework/cloud/task/batch/listener/package-info.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Listener components for Spring Cloud Task Batch integration. 
*/ package org.springframework.cloud.task.batch.listener; ================================================ FILE: spring-cloud-task-batch/src/main/java/org/springframework/cloud/task/batch/listener/support/JdbcTaskBatchDao.java ================================================ /* * Copyright 2016-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.batch.listener.support; import javax.sql.DataSource; import org.springframework.batch.core.job.JobExecution; import org.springframework.cloud.task.batch.listener.TaskBatchDao; import org.springframework.cloud.task.configuration.TaskProperties; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.jdbc.core.JdbcOperations; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.util.Assert; import org.springframework.util.StringUtils; /** * JDBC based implementation of the {@link TaskBatchDao}. Intended to be used in * conjunction with the JDBC based * {@link org.springframework.cloud.task.repository.TaskRepository} * * @author Michael Minella * @author Glenn Renfro */ public class JdbcTaskBatchDao implements TaskBatchDao { private static final String INSERT_STATEMENT = "INSERT INTO %PREFIX%TASK_BATCH (TASK_EXECUTION_ID, JOB_EXECUTION_ID) VALUES (?, ?)"; private String tablePrefix = TaskProperties.DEFAULT_TABLE_PREFIX; private final JdbcOperations jdbcTemplate; /** * Initializes the JdbcTaskBatchDao. 
* @param dataSource {@link DataSource} where the task batch table resides. * @param tablePrefix the table prefix to use for this dao. */ public JdbcTaskBatchDao(DataSource dataSource, String tablePrefix) { this(dataSource); Assert.hasText(tablePrefix, "tablePrefix must not be null nor empty."); this.tablePrefix = tablePrefix; } /** * Initializes the JdbcTaskBatchDao and defaults the table prefix to * {@link TaskProperties#DEFAULT_TABLE_PREFIX}. * @param dataSource {@link DataSource} where the task batch table resides. */ public JdbcTaskBatchDao(DataSource dataSource) { Assert.notNull(dataSource, "A dataSource is required"); this.jdbcTemplate = new JdbcTemplate(dataSource); } @Override public void saveRelationship(TaskExecution taskExecution, JobExecution jobExecution) { Assert.notNull(taskExecution, "A taskExecution is required"); Assert.notNull(jobExecution, "A jobExecution is required"); this.jdbcTemplate.update(getQuery(INSERT_STATEMENT), taskExecution.getExecutionId(), jobExecution.getId()); } private String getQuery(String base) { return StringUtils.replace(base, "%PREFIX%", this.tablePrefix); } } ================================================ FILE: spring-cloud-task-batch/src/main/java/org/springframework/cloud/task/batch/listener/support/MapTaskBatchDao.java ================================================ /* * Copyright 2016-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.springframework.cloud.task.batch.listener.support; import java.util.Map; import java.util.Set; import java.util.TreeSet; import org.springframework.batch.core.job.JobExecution; import org.springframework.cloud.task.batch.listener.TaskBatchDao; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.util.Assert; /** * Map implementation of the {@link TaskBatchDao}. *

* This is intended for testing purposes only! *

* * @author Michael Minella */ public class MapTaskBatchDao implements TaskBatchDao { private Map> relationships; public MapTaskBatchDao(Map> relationships) { Assert.notNull(relationships, "Relationships must not be null"); this.relationships = relationships; } @Override public void saveRelationship(TaskExecution taskExecution, JobExecution jobExecution) { Assert.notNull(taskExecution, "A taskExecution is required"); Assert.notNull(jobExecution, "A jobExecution is required"); if (this.relationships.containsKey(taskExecution.getExecutionId())) { this.relationships.get(taskExecution.getExecutionId()).add(jobExecution.getId()); } else { TreeSet jobExecutionIds = new TreeSet<>(); jobExecutionIds.add(jobExecution.getId()); this.relationships.put(taskExecution.getExecutionId(), jobExecutionIds); } } } ================================================ FILE: spring-cloud-task-batch/src/main/java/org/springframework/cloud/task/batch/listener/support/package-info.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Support classes for Spring Cloud Task Batch listener implementations. 
*/ package org.springframework.cloud.task.batch.listener.support; ================================================ FILE: spring-cloud-task-batch/src/main/resources/META-INF/additional-spring-configuration-metadata.json ================================================ { "properties": [ { "defaultValue": true, "name": "spring.cloud.task.batch.listener.enabled", "description": "This property is used to determine if a task will be linked to the batch jobs that are run.", "type": "java.lang.Boolean" }, { "defaultValue": false, "name": "spring.cloud.task.batch.fail-on-job-failure", "description": "This property is used to determine if a task app should return with a non zero exit code if a batch job fails.", "type": "java.lang.Boolean" }, { "defaultValue": true, "name": "spring.cloud.task.batch.events.enabled", "description": "This property is used to determine if a task should listen for batch events.", "type": "java.lang.Boolean" }, { "defaultValue": true, "name": "spring.cloud.task.batch.events.chunk.enabled", "description": "This property is used to determine if a task should listen for batch chunk events.", "type": "java.lang.Boolean" }, { "defaultValue": true, "name": "spring.cloud.task.batch.events.item-process.enabled", "description": "This property is used to determine if a task should listen for batch item processed events.", "type": "java.lang.Boolean" }, { "defaultValue": true, "name": "spring.cloud.task.batch.events.item-read.enabled", "description": "This property is used to determine if a task should listen for batch item read events.", "type": "java.lang.Boolean" }, { "defaultValue": true, "name": "spring.cloud.task.batch.events.item-write.enabled", "description": "This property is used to determine if a task should listen for batch item write events.", "type": "java.lang.Boolean" }, { "defaultValue": true, "name": "spring.cloud.task.batch.events.job-execution.enabled", "description": "This property is used to determine if a task should listen for batch 
job execution events.", "type": "java.lang.Boolean" }, { "defaultValue": true, "name": "spring.cloud.task.batch.events.skip.enabled", "description": "This property is used to determine if a task should listen for batch skip events.", "type": "java.lang.Boolean" }, { "defaultValue": true, "name": "spring.cloud.task.batch.events.step-execution.enabled", "description": "This property is used to determine if a task should listen for batch step execution events.", "type": "java.lang.Boolean" } ] } ================================================ FILE: spring-cloud-task-batch/src/main/resources/META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports ================================================ org.springframework.cloud.task.batch.configuration.TaskBatchAutoConfiguration org.springframework.cloud.task.batch.configuration.TaskJobLauncherAutoConfiguration ================================================ FILE: spring-cloud-task-batch/src/test/java/org/springframework/cloud/task/batch/configuration/TaskBatchTest.java ================================================ /* * Copyright 2018-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
 */
package org.springframework.cloud.task.batch.configuration;

import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

import org.springframework.boot.autoconfigure.ImportAutoConfiguration;

/**
 * Contains the common configurations to run a unit test for the task batch features of
 * SCT.
 *
 * <p>NOTE(review): {@code @ImportAutoConfiguration} is applied without an explicit
 * class list, so the imported auto-configurations are resolved from the entries
 * registered for this annotation — confirm that is intentional.
 *
 * @author Glenn Renfro
 */
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.RUNTIME)
@Documented
@ImportAutoConfiguration
public @interface TaskBatchTest {

}

================================================
FILE: spring-cloud-task-batch/src/test/java/org/springframework/cloud/task/batch/configuration/TaskJobLauncherAutoConfigurationTests.java
================================================
/*
 * Copyright 2018-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.cloud.task.batch.configuration;

import org.junit.jupiter.api.Test;

import org.springframework.batch.core.configuration.JobRegistry;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.configuration.support.MapJobRegistry;
import org.springframework.batch.core.job.Job;
import org.springframework.batch.core.job.builder.JobBuilder;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.core.step.builder.StepBuilder;
import org.springframework.batch.infrastructure.repeat.RepeatStatus;
import org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager;
import org.springframework.boot.autoconfigure.AutoConfigurations;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.batch.autoconfigure.BatchAutoConfiguration;
import org.springframework.boot.batch.autoconfigure.JobLauncherApplicationRunner;
import org.springframework.boot.jdbc.autoconfigure.EmbeddedDataSourceConfiguration;
import org.springframework.boot.test.context.assertj.AssertableApplicationContext;
import org.springframework.boot.test.context.runner.ApplicationContextRunner;
import org.springframework.cloud.task.batch.handler.TaskJobLauncherApplicationRunner;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.test.util.ReflectionTestUtils;
import org.springframework.transaction.PlatformTransactionManager;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Tests that {@code TaskJobLauncherAutoConfiguration} registers (or withholds) the
 * {@link TaskJobLauncherApplicationRunner} based on the
 * {@code spring.cloud.task.batch.*} properties.
 *
 * @author Glenn Renfro
 */
public class TaskJobLauncherAutoConfigurationTests {

	// Shared runner: batch auto-configuration + embedded datasource + a single test job.
	private final ApplicationContextRunner contextRunner = new ApplicationContextRunner()
		.withConfiguration(AutoConfigurations.of(BatchAutoConfiguration.class, EmbeddedDataSourceConfiguration.class))
		.withUserConfiguration(TestJobConfiguration.class);

	// fail-on-job-failure=true activates the task-specific runner with default order 0.
	@Test
	public void testAutoBuiltDataSourceWithTaskJobLauncherCLR() {
		this.contextRunner.withPropertyValues("spring.cloud.task.batch.fail-on-job-failure=true").run(context -> {
			assertThat(context).hasSingleBean(TaskJobLauncherApplicationRunner.class);
			assertThat(context.getBean(TaskJobLauncherApplicationRunner.class).getOrder()).isEqualTo(0);
		});
	}

	// applicationRunnerOrder overrides the runner's default ordering.
	@Test
	public void testAutoBuiltDataSourceWithTaskJobLauncherCLROrder() {
		this.contextRunner
			.withPropertyValues("spring.cloud.task.batch.fail-on-job-failure=true",
					"spring.cloud.task.batch.applicationRunnerOrder=100")
			.run(context -> {
				assertThat(context.getBean(TaskJobLauncherApplicationRunner.class).getOrder()).isEqualTo(100);
			});
	}

	// When both spring.batch.job.name and the task-level job name are set, the Boot
	// property wins — the runner's jobName field should reflect "job1", not "foobar".
	@Test
	public void testAutoBuiltDataSourceWithBatchJobNames() {
		this.contextRunner
			.withPropertyValues("spring.cloud.task.batch.fail-on-job-failure=true", "spring.batch.job.name=job1",
					"spring.cloud.task.batch.jobName=foobar")
			.run(context -> {
				validateJobNames(context, "job1");
			});
	}

	// A comma-separated task-level jobNames list is passed through unchanged.
	@Test
	public void testAutoBuiltDataSourceWithTaskBatchJobNames() {
		this.contextRunner
			.withPropertyValues("spring.cloud.task.batch.fail-on-job-failure=true",
					"spring.cloud.task.batch.jobNames=job1,job2")
			.run(context -> {
				validateJobNames(context, "job1,job2");
			});
	}

	// Asserts the private "jobName" field of the runner via reflection, since the
	// runner exposes no public accessor for it.
	private void validateJobNames(AssertableApplicationContext context, String jobNames) throws Exception {
		JobLauncherApplicationRunner jobLauncherApplicationRunner = context
			.getBean(TaskJobLauncherApplicationRunner.class);
		Object names = ReflectionTestUtils.getField(jobLauncherApplicationRunner, "jobName");
		assertThat(names).isEqualTo(jobNames);
	}

	// Without fail-on-job-failure the task-specific runner must not be registered.
	@Test
	public void testAutoBuiltDataSourceWithTaskJobLauncherCLRDisabled() {
		this.contextRunner.run(context -> {
			assertThat(context).doesNotHaveBean(TaskJobLauncherApplicationRunner.class);
		});
	}

	@Configuration
	@EnableAutoConfiguration
	@EnableBatchProcessing
	static class TestJobConfiguration {

		// Single-step job whose tasklet completes immediately.
		@Bean
		public Job job(JobRepository jobRepository, PlatformTransactionManager transactionManager) {
			return new JobBuilder("job", jobRepository)
				.start(new StepBuilder("step1", jobRepository).tasklet((contribution, chunkContext) -> {
					System.out.println("Executed");
					return RepeatStatus.FINISHED;
				}, transactionManager).build())
				.build();
		}

		@Bean
		PlatformTransactionManager transactionManager() {
			return new ResourcelessTransactionManager();
		}

		@Bean
		JobRegistry jobRegistry() {
			return new MapJobRegistry();
		}

	}

}

================================================
FILE: spring-cloud-task-batch/src/test/java/org/springframework/cloud/task/batch/handler/TaskJobLauncherApplicationRunnerCoreTests.java
================================================
/*
 * Copyright 2018-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*/ package org.springframework.cloud.task.batch.handler; import java.util.Arrays; import java.util.List; import javax.sql.DataSource; import org.junit.jupiter.api.Test; import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing; import org.springframework.batch.core.configuration.annotation.EnableJdbcJobRepository; import org.springframework.batch.core.job.Job; import org.springframework.batch.core.job.JobExecutionException; import org.springframework.batch.core.job.JobInstance; import org.springframework.batch.core.job.builder.JobBuilder; import org.springframework.batch.core.job.builder.SimpleJobBuilder; import org.springframework.batch.core.job.parameters.JobParameters; import org.springframework.batch.core.job.parameters.JobParametersBuilder; import org.springframework.batch.core.job.parameters.RunIdIncrementer; import org.springframework.batch.core.launch.JobOperator; import org.springframework.batch.core.repository.JobRepository; import org.springframework.batch.core.step.Step; import org.springframework.batch.core.step.builder.StepBuilder; import org.springframework.batch.core.step.tasklet.Tasklet; import org.springframework.boot.autoconfigure.AutoConfigurations; import org.springframework.boot.jdbc.autoconfigure.DataSourceAutoConfiguration; import org.springframework.boot.jdbc.autoconfigure.DataSourceTransactionManagerAutoConfiguration; import org.springframework.boot.jdbc.init.DataSourceScriptDatabaseInitializer; import org.springframework.boot.sql.init.DatabaseInitializationSettings; import org.springframework.boot.test.context.runner.ApplicationContextRunner; import org.springframework.boot.transaction.autoconfigure.TransactionAutoConfiguration; import org.springframework.cloud.task.batch.configuration.TaskBatchProperties; import org.springframework.cloud.task.listener.TaskException; import org.springframework.context.ApplicationContext; import org.springframework.context.annotation.Bean; import 
org.springframework.context.annotation.Configuration; import org.springframework.transaction.PlatformTransactionManager; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.AssertionsForClassTypes.assertThatExceptionOfType; /** * @author Glenn Renfro */ public class TaskJobLauncherApplicationRunnerCoreTests { private final ApplicationContextRunner contextRunner = new ApplicationContextRunner() .withConfiguration(AutoConfigurations.of(DataSourceAutoConfiguration.class, TransactionAutoConfiguration.class, DataSourceTransactionManagerAutoConfiguration.class)) .withUserConfiguration(BatchConfiguration.class); @Test void basicExecution() { this.contextRunner.run((context) -> { JobLauncherApplicationRunnerContext jobLauncherContext = new JobLauncherApplicationRunnerContext(context); jobLauncherContext.executeJob(new JobParameters()); assertThat(jobLauncherContext.jobInstances()).hasSize(1); jobLauncherContext.executeJob(new JobParametersBuilder().addLong("id", 1L).toJobParameters()); assertThat(jobLauncherContext.jobInstances()).hasSize(2); }); } @Test void incrementExistingExecution() { this.contextRunner.run((context) -> { JobLauncherApplicationRunnerContext jobLauncherContext = new JobLauncherApplicationRunnerContext(context); Job job = jobLauncherContext.configureJob().incrementer(new RunIdIncrementer()).build(); jobLauncherContext.runner.execute(job, new JobParameters()); jobLauncherContext.runner.execute(job, new JobParameters()); assertThat(jobLauncherContext.jobInstances()).hasSize(2); }); } @Test void runDifferentInstances() { this.contextRunner.run((context) -> { PlatformTransactionManager transactionManager = context.getBean(PlatformTransactionManager.class); JobLauncherApplicationRunnerContext jobLauncherContext = new JobLauncherApplicationRunnerContext(context); Job job = jobLauncherContext.jobBuilder() .start(jobLauncherContext.stepBuilder().tasklet(throwingTasklet(), transactionManager).build()) .build(); // start a 
job instance JobParameters jobParameters = new JobParametersBuilder().addString("name", "foo").toJobParameters(); runFailedJob(jobLauncherContext, job, jobParameters); assertThat(jobLauncherContext.jobInstances()).hasSize(1); // start a different job instance JobParameters otherJobParameters = new JobParametersBuilder().addString("name", "bar").toJobParameters(); runFailedJob(jobLauncherContext, job, otherJobParameters); assertThat(jobLauncherContext.jobInstances()).hasSize(2); }); } @Test void retryFailedExecutionOnNonRestartableJob() { this.contextRunner.run((context) -> { PlatformTransactionManager transactionManager = context.getBean(PlatformTransactionManager.class); JobLauncherApplicationRunnerContext jobLauncherContext = new JobLauncherApplicationRunnerContext(context); Job job = jobLauncherContext.jobBuilder() .preventRestart() .start(jobLauncherContext.stepBuilder().tasklet(throwingTasklet(), transactionManager).build()) .incrementer(new RunIdIncrementer()) .build(); runFailedJob(jobLauncherContext, job, new JobParameters()); // A failed job that is not restartable does not re-use the job params of // the last execution, but creates a new job instance when running it again. 
assertThat(jobLauncherContext.jobInstances()).hasSize(1); assertThatExceptionOfType(TaskException.class).isThrownBy(() -> { // try to re-run a failed execution // In this case the change from the previous behavior is that a new job // instance is created // https://github.com/spring-projects/spring-batch/issues/4910 jobLauncherContext.runner.execute(job, new JobParametersBuilder().addLong("run.id", 1L).toJobParameters()); }).withMessageContaining("Job job failed during execution for job instance id 2 with jobExecutionId of 2 "); }); } @Test void retryFailedExecutionWithNonIdentifyingParameters() { this.contextRunner.run((context) -> { PlatformTransactionManager transactionManager = context.getBean(PlatformTransactionManager.class); JobLauncherApplicationRunnerContext jobLauncherContext = new JobLauncherApplicationRunnerContext(context); Job job = jobLauncherContext.jobBuilder() .start(jobLauncherContext.stepBuilder().tasklet(throwingTasklet(), transactionManager).build()) .incrementer(new RunIdIncrementer()) .build(); JobParameters jobParameters = new JobParametersBuilder().addLong("id", 1L, false) .addLong("foo", 2L, false) .toJobParameters(); runFailedJob(jobLauncherContext, job, jobParameters); assertThat(jobLauncherContext.jobInstances()).hasSize(1); // try to re-run a failed execution with non identifying parameters // Updated to expect a new 2 instances are created because of this change // https://github.com/spring-projects/spring-batch/issues/4910 runFailedJob(jobLauncherContext, job, new JobParametersBuilder(jobParameters).addLong("run.id", 1L).toJobParameters()); assertThat(jobLauncherContext.jobInstances()).hasSize(2); }); } private Tasklet throwingTasklet() { return (contribution, chunkContext) -> { throw new RuntimeException("Planned"); }; } private void runFailedJob(JobLauncherApplicationRunnerContext jobLauncherContext, Job job, JobParameters jobParameters) throws Exception { boolean isExceptionThrown = false; try { 
jobLauncherContext.runner.execute(job, jobParameters); } catch (TaskException taskException) { isExceptionThrown = true; } assertThat(isExceptionThrown).isTrue(); } static class JobLauncherApplicationRunnerContext { private final TaskJobLauncherApplicationRunner runner; private final JobRepository jobRepository; private final JobBuilder jobBuilder; private final Job job; private final StepBuilder stepBuilder; private final Step step; JobLauncherApplicationRunnerContext(ApplicationContext context) { JobOperator jobOperator = context.getBean(JobOperator.class); jobRepository = context.getBean(JobRepository.class); PlatformTransactionManager transactionManager = context.getBean(PlatformTransactionManager.class); this.stepBuilder = new StepBuilder("step", jobRepository); this.step = this.stepBuilder.tasklet((contribution, chunkContext) -> null, transactionManager).build(); this.jobBuilder = new JobBuilder("job", jobRepository); this.job = this.jobBuilder.start(this.step).build(); this.runner = new TaskJobLauncherApplicationRunner(jobOperator, jobRepository, new TaskBatchProperties()); } List jobInstances() { return this.jobRepository.getJobInstances("job", 0, 100); } void executeJob(JobParameters jobParameters) throws JobExecutionException { this.runner.execute(this.job, jobParameters); } JobBuilder jobBuilder() { return this.jobBuilder; } StepBuilder stepBuilder() { return this.stepBuilder; } SimpleJobBuilder configureJob() { return this.jobBuilder.start(this.step); } } @EnableBatchProcessing @EnableJdbcJobRepository @Configuration(proxyBeanMethods = false) static class BatchConfiguration { private final DataSource dataSource; protected BatchConfiguration(DataSource dataSource) { this.dataSource = dataSource; } @Bean DataSourceScriptDatabaseInitializer batchDataSourceInitializer() { DatabaseInitializationSettings settings = new DatabaseInitializationSettings(); settings.setSchemaLocations(Arrays.asList("classpath:org/springframework/batch/core/schema-h2.sql")); return 
new DataSourceScriptDatabaseInitializer(this.dataSource, settings); } } } ================================================ FILE: spring-cloud-task-batch/src/test/java/org/springframework/cloud/task/batch/handler/TaskJobLauncherApplicationRunnerTests.java ================================================ /* * Copyright 2018-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.batch.handler; import java.util.Arrays; import java.util.Set; import javax.sql.DataSource; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.function.Executable; import org.springframework.batch.core.configuration.JobRegistry; import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing; import org.springframework.batch.core.configuration.annotation.EnableJdbcJobRepository; import org.springframework.batch.core.configuration.support.MapJobRegistry; import org.springframework.batch.core.job.Job; import org.springframework.batch.core.job.builder.JobBuilder; import org.springframework.batch.core.repository.JobRepository; import org.springframework.batch.core.step.builder.StepBuilder; import org.springframework.batch.infrastructure.repeat.RepeatStatus; import org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager; import org.springframework.beans.factory.NoSuchBeanDefinitionException; 
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.EnableAutoConfiguration; import org.springframework.boot.autoconfigure.ImportAutoConfiguration; import org.springframework.boot.autoconfigure.context.PropertyPlaceholderAutoConfiguration; import org.springframework.boot.batch.autoconfigure.BatchAutoConfiguration; import org.springframework.boot.batch.autoconfigure.BatchProperties; import org.springframework.boot.batch.autoconfigure.JobExecutionEvent; import org.springframework.boot.batch.autoconfigure.JobLauncherApplicationRunner; import org.springframework.boot.jdbc.autoconfigure.EmbeddedDataSourceConfiguration; import org.springframework.boot.jdbc.init.DataSourceScriptDatabaseInitializer; import org.springframework.boot.sql.init.DatabaseInitializationSettings; import org.springframework.cloud.task.batch.configuration.TaskBatchAutoConfiguration; import org.springframework.cloud.task.batch.configuration.TaskJobLauncherAutoConfiguration; import org.springframework.cloud.task.configuration.EnableTask; import org.springframework.cloud.task.configuration.SimpleTaskAutoConfiguration; import org.springframework.cloud.task.configuration.SingleTaskConfiguration; import org.springframework.cloud.task.listener.TaskException; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.cloud.task.repository.TaskExplorer; import org.springframework.context.ApplicationListener; import org.springframework.context.ConfigurableApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; import org.springframework.data.domain.Page; import org.springframework.data.domain.PageRequest; import org.springframework.stereotype.Component; import org.springframework.transaction.PlatformTransactionManager; import static 
org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatExceptionOfType; /** * @author Glenn Renfro */ public class TaskJobLauncherApplicationRunnerTests { private static final String DEFAULT_ERROR_MESSAGE = "Job jobA failed during execution for job instance id 1 with jobExecutionId of 1"; private ConfigurableApplicationContext applicationContext; @AfterEach public void tearDown() { if (this.applicationContext != null && this.applicationContext.isActive()) { this.applicationContext.close(); } } @Test public void testTaskJobLauncherCLRSuccessFail() { String[] enabledArgs = new String[] { "--spring.cloud.task.batch.failOnJobFailure=true" }; validateForFail(DEFAULT_ERROR_MESSAGE, TaskJobLauncherApplicationRunnerTests.JobWithFailureConfiguration.class, enabledArgs); } /** * Verifies that the task will return an exit code other than zero if the job fails * with the EnableTask annotation. */ @Disabled("The task repository is not getting populated.") @Test public void testTaskJobLauncherCLRSuccessFailWithAnnotation() { String[] enabledArgs = new String[] { "--spring.cloud.task.batch.failOnJobFailure=true" }; validateForFail(DEFAULT_ERROR_MESSAGE, TaskJobLauncherApplicationRunnerTests.JobWithFailureAnnotatedConfiguration.class, enabledArgs); } @Test public void testTaskJobLauncherCLRSuccessFailWithTaskExecutor() { String[] enabledArgs = new String[] { "--spring.cloud.task.batch.failOnJobFailure=true", "--spring.cloud.task.batch.failOnJobFailurePollInterval=500" }; validateForFail(DEFAULT_ERROR_MESSAGE, TaskJobLauncherApplicationRunnerTests.JobWithFailureTaskExecutorConfiguration.class, enabledArgs); } @Test public void testNoTaskJobLauncher() { String[] enabledArgs = new String[] { "--spring.cloud.task.batch.failOnJobFailure=true", "--spring.cloud.task.batch.failOnJobFailurePollInterval=500", "--spring.batch.job.enabled=false" }; this.applicationContext = SpringApplication .run(new Class[] { 
TaskJobLauncherApplicationRunnerTests.JobWithFailureConfiguration.class }, enabledArgs); JobRepository jobRepository = this.applicationContext.getBean(JobRepository.class); assertThat(jobRepository.getJobNames().size()).isEqualTo(0); } @Test public void testTaskJobLauncherPickOneJob() { String[] enabledArgs = new String[] { "--spring.cloud.task.batch.fail-on-job-failure=true", "--spring.cloud.task.batch.jobNames=jobSucceed" }; boolean isExceptionThrown = false; try { this.applicationContext = SpringApplication.run( new Class[] { TaskJobLauncherApplicationRunnerTests.JobWithFailureConfiguration.class }, enabledArgs); } catch (IllegalStateException exception) { isExceptionThrown = true; } assertThat(isExceptionThrown).isFalse(); validateContext(); } @Test public void testApplicationRunnerSetToFalse() { String[] enabledArgs = new String[] {}; this.applicationContext = SpringApplication .run(new Class[] { TaskJobLauncherApplicationRunnerTests.JobConfiguration.class }, enabledArgs); validateContext(); assertThat(this.applicationContext.getBean(JobLauncherApplicationRunner.class)).isNotNull(); Executable executable = () -> this.applicationContext.getBean(TaskJobLauncherApplicationRunner.class); assertThatExceptionOfType(NoSuchBeanDefinitionException.class).isThrownBy(executable::execute) .withMessage("No qualifying bean of type " + "'org.springframework.cloud.task.batch.handler.TaskJobLauncherApplicationRunner' available"); validateContext(); } private void validateContext() { TaskExplorer taskExplorer = this.applicationContext.getBean(TaskExplorer.class); Page page = taskExplorer.findTaskExecutionsByName("application", PageRequest.of(0, 1)); Set jobExecutionIds = taskExplorer .getJobExecutionIdsByTaskExecutionId(page.iterator().next().getExecutionId()); assertThat(jobExecutionIds.size()).isEqualTo(1); assertThat(taskExplorer.getTaskExecution(jobExecutionIds.iterator().next()).getExecutionId()).isEqualTo(1); JobExecutionEventListener listener = 
this.applicationContext.getBean(JobExecutionEventListener.class); assertThat(listener.getEventCounter()).isEqualTo(1); } private void validateForFail(String errorMessage, Class clazz, String[] enabledArgs) { Executable executable = () -> this.applicationContext = SpringApplication .run(new Class[] { clazz, PropertyPlaceholderAutoConfiguration.class }, enabledArgs); assertThatExceptionOfType(TaskException.class).isThrownBy(executable::execute) .withMessageContaining(errorMessage); } @Component private static final class JobExecutionEventListener implements ApplicationListener { private int eventCounter = 0; @Override public void onApplicationEvent(JobExecutionEvent event) { eventCounter++; } public int getEventCounter() { return eventCounter; } } @EnableTask @Import({ JobExecutionEventListener.class }) @EnableAutoConfiguration(exclude = TaskJobLauncherAutoConfiguration.class) @EnableBatchProcessing @Configuration public static class JobConfiguration { @Bean public Job job(JobRepository jobRepository, PlatformTransactionManager transactionManager) { return new JobBuilder("job", jobRepository) .start(new StepBuilder("step1", jobRepository).tasklet((contribution, chunkContext) -> { System.out.println("Executed"); return RepeatStatus.FINISHED; }, transactionManager).build()) .build(); } @Bean public JobRegistry jobRegistry() { return new MapJobRegistry(); } @Bean public PlatformTransactionManager transactionManager() { return new ResourcelessTransactionManager(); } } @Configuration(proxyBeanMethods = false) @Import(JobExecutionEventListener.class) public static class TransactionManagerTestConfiguration { @Bean public PlatformTransactionManager transactionManager() { return new ResourcelessTransactionManager(); } @Bean public BatchProperties batchProperties() { return new BatchProperties(); } @Bean public JobRegistry jobRegistry() { return new MapJobRegistry(); } @Bean DataSourceScriptDatabaseInitializer batchDataSourceInitializer(DataSource dataSource) { 
DatabaseInitializationSettings settings = new DatabaseInitializationSettings(); settings.setSchemaLocations(Arrays.asList("classpath:org/springframework/batch/core/schema-h2.sql")); return new DataSourceScriptDatabaseInitializer(dataSource, settings); } } @EnableBatchProcessing @EnableJdbcJobRepository @ImportAutoConfiguration({ PropertyPlaceholderAutoConfiguration.class, BatchAutoConfiguration.class, TaskBatchAutoConfiguration.class, TaskJobLauncherAutoConfiguration.class, SingleTaskConfiguration.class, SimpleTaskAutoConfiguration.class, SimpleTaskAutoConfiguration.class }) @Import({ EmbeddedDataSourceConfiguration.class, TransactionManagerTestConfiguration.class }) @EnableTask public static class JobWithFailureConfiguration { @Autowired private JobRepository jobRepository; @Autowired private PlatformTransactionManager transactionManager; @Bean public Job jobFail() { return new JobBuilder("jobA", this.jobRepository) .start(new StepBuilder("step1", this.jobRepository).tasklet((contribution, chunkContext) -> { System.out.println("Executed"); throw new IllegalStateException("WHOOPS"); }, transactionManager).build()) .build(); } @Bean public Job jobFun() { return new JobBuilder("jobSucceed", this.jobRepository) .start(new StepBuilder("step1Succeed", this.jobRepository).tasklet((contribution, chunkContext) -> { System.out.println("Executed"); return RepeatStatus.FINISHED; }, transactionManager).build()) .build(); } } @EnableTask public static class JobWithFailureAnnotatedConfiguration extends JobWithFailureConfiguration { } @Import(JobWithFailureConfiguration.class) @Configuration @EnableTask public static class JobWithFailureTaskExecutorConfiguration { } } ================================================ FILE: spring-cloud-task-batch/src/test/java/org/springframework/cloud/task/batch/listener/PrefixTests.java ================================================ /* * Copyright 2018-present the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.batch.listener; import java.util.Set; import javax.sql.DataSource; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import org.springframework.batch.core.job.Job; import org.springframework.batch.core.job.builder.JobBuilder; import org.springframework.batch.core.repository.JobRepository; import org.springframework.batch.core.step.builder.StepBuilder; import org.springframework.batch.infrastructure.repeat.RepeatStatus; import org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.AutoConfiguration; import org.springframework.cloud.task.batch.configuration.TaskBatchTest; import org.springframework.cloud.task.configuration.EnableTask; import org.springframework.cloud.task.repository.TaskExplorer; import org.springframework.context.ConfigurableApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType; import org.springframework.transaction.PlatformTransactionManager; import static org.assertj.core.api.AssertionsForClassTypes.assertThat; /** * @author Glenn Renfro */ public class PrefixTests { private ConfigurableApplicationContext 
applicationContext; @AfterEach public void tearDown() { if (this.applicationContext != null && this.applicationContext.isActive()) { this.applicationContext.close(); } } @Disabled("Unable to find TaskRepository") @Test public void testPrefix() { this.applicationContext = SpringApplication.run(JobConfiguration.class, "--spring.cloud.task.tablePrefix=FOO_"); TaskExplorer taskExplorer = this.applicationContext.getBean(TaskExplorer.class); Set jobIds = taskExplorer.getJobExecutionIdsByTaskExecutionId(1); assertThat(jobIds.size()).isEqualTo(1); assertThat(jobIds.contains(1L)); } @AutoConfiguration @TaskBatchTest @EnableTask public static class JobConfiguration { @Bean public Job job(JobRepository jobRepository, PlatformTransactionManager transactionManager) { return new JobBuilder("job", jobRepository) .start(new StepBuilder("step1", jobRepository).tasklet((contribution, chunkContext) -> { System.out.println("Executed"); return RepeatStatus.FINISHED; }, transactionManager).build()) .build(); } @Bean public DataSource dataSource() { return new EmbeddedDatabaseBuilder().addScript("classpath:schema-h2.sql") .setType(EmbeddedDatabaseType.H2) .build(); } @Bean PlatformTransactionManager transactionManager() { return new ResourcelessTransactionManager(); } } } ================================================ FILE: spring-cloud-task-batch/src/test/java/org/springframework/cloud/task/batch/listener/PrimaryKeyTests.java ================================================ /* * Copyright 2022-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.batch.listener; import java.util.Set; import javax.sql.DataSource; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import org.springframework.batch.core.job.Job; import org.springframework.batch.core.job.builder.JobBuilder; import org.springframework.batch.core.repository.JobRepository; import org.springframework.batch.core.step.builder.StepBuilder; import org.springframework.batch.infrastructure.repeat.RepeatStatus; import org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager; import org.springframework.boot.SpringApplication; import org.springframework.cloud.task.batch.configuration.TaskBatchTest; import org.springframework.cloud.task.configuration.EnableTask; import org.springframework.cloud.task.repository.TaskExplorer; import org.springframework.context.ConfigurableApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType; import org.springframework.transaction.PlatformTransactionManager; import static org.assertj.core.api.Assertions.assertThat; /** * @author Henning Pöttker */ class PrimaryKeyTests { private ConfigurableApplicationContext applicationContext; @AfterEach void tearDown() { if (this.applicationContext != null && this.applicationContext.isActive()) { this.applicationContext.close(); } } @Disabled("Unable to find TaskRepository") @Test void testSchemaWithPrimaryKeys() { this.applicationContext = SpringApplication.run(JobConfiguration.class); TaskExplorer taskExplorer = this.applicationContext.getBean(TaskExplorer.class); Set jobIds = 
taskExplorer.getJobExecutionIdsByTaskExecutionId(1); assertThat(jobIds).containsExactly(1L); } @Configuration(proxyBeanMethods = false) @TaskBatchTest @EnableTask static class JobConfiguration { @Bean Job job(JobRepository jobRepository, PlatformTransactionManager transactionManager) { return new JobBuilder("job", jobRepository) .start(new StepBuilder("step1", jobRepository).tasklet((contribution, chunkContext) -> { System.out.println("Executed"); return RepeatStatus.FINISHED; }, transactionManager).build()) .build(); } @Bean DataSource dataSource() { return new EmbeddedDatabaseBuilder().addScript("classpath:schema-with-primary-keys-h2.sql") .setType(EmbeddedDatabaseType.H2) .build(); } @Bean PlatformTransactionManager transactionManager() { return new ResourcelessTransactionManager(); } } } ================================================ FILE: spring-cloud-task-batch/src/test/java/org/springframework/cloud/task/batch/listener/TaskBatchExecutionListenerTests.java ================================================ /* * Copyright 2016-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.springframework.cloud.task.batch.listener; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Set; import javax.sql.DataSource; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing; import org.springframework.batch.core.job.Job; import org.springframework.batch.core.job.SimpleJob; import org.springframework.batch.core.job.builder.JobBuilder; import org.springframework.batch.core.repository.JobRepository; import org.springframework.batch.core.scope.context.ChunkContext; import org.springframework.batch.core.step.StepContribution; import org.springframework.batch.core.step.builder.StepBuilder; import org.springframework.batch.core.step.tasklet.Tasklet; import org.springframework.batch.infrastructure.repeat.RepeatStatus; import org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager; import org.springframework.beans.factory.FactoryBean; import org.springframework.beans.factory.NoSuchBeanDefinitionException; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.context.PropertyPlaceholderAutoConfiguration; import org.springframework.boot.batch.autoconfigure.BatchAutoConfiguration; import org.springframework.boot.jdbc.autoconfigure.EmbeddedDataSourceConfiguration; import org.springframework.cloud.task.batch.configuration.TaskBatchAutoConfiguration; import org.springframework.cloud.task.batch.configuration.TaskBatchExecutionListenerBeanPostProcessor; import org.springframework.cloud.task.batch.configuration.TaskBatchTest; import org.springframework.cloud.task.configuration.DefaultTaskConfigurer; import org.springframework.cloud.task.configuration.EnableTask; import org.springframework.cloud.task.configuration.SimpleTaskAutoConfiguration; import 
org.springframework.cloud.task.configuration.SingleTaskConfiguration; import org.springframework.cloud.task.configuration.TaskConfigurer; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.cloud.task.repository.TaskExplorer; import org.springframework.context.ConfigurableApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Import; import org.springframework.context.annotation.Primary; import org.springframework.data.domain.Page; import org.springframework.data.domain.PageRequest; import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType; import org.springframework.transaction.PlatformTransactionManager; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatExceptionOfType; import static org.assertj.core.api.Assertions.assertThatThrownBy; /** * @author Michael Minella * @author Glenn Renfro */ @Disabled("Tests can not fined TaskRepository") public class TaskBatchExecutionListenerTests { private static final String[] ARGS = new String[] {}; private ConfigurableApplicationContext applicationContext; @AfterEach public void tearDown() { if (this.applicationContext != null && this.applicationContext.isActive()) { this.applicationContext.close(); } } @Test public void testAutobuiltDataSource() { this.applicationContext = SpringApplication.run(JobConfiguration.class, ARGS); validateContext(); } @Test public void testNoAutoConfigurationEnabled() { this.applicationContext = SpringApplication.run(JobConfiguration.class, "--spring.cloud.task.batch.listener.enabled=false"); assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> { validateContext(); }); } @Test public void testNoAutoConfigurationEnable() { this.applicationContext = SpringApplication.run(JobConfiguration.class, 
"--spring.cloud.task.batch.listener.enable=false"); assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> { validateContext(); }); } @Test public void testNoAutoConfigurationBothDisabled() { this.applicationContext = SpringApplication.run(JobConfiguration.class, "--spring.cloud.task.batch.listener.enable=false --spring.cloud.task.batch.listener.enabled=false"); assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> { validateContext(); }); } @Test public void testAutoConfigurationEnable() { this.applicationContext = SpringApplication.run(JobConfiguration.class, "--spring.cloud.task.batch.listener.enable=true"); validateContext(); } @Test public void testAutoConfigurationEnabled() { this.applicationContext = SpringApplication.run(JobConfiguration.class, "--spring.cloud.task.batch.listener.enabled=true"); validateContext(); } @Test public void testFactoryBean() { this.applicationContext = SpringApplication.run(JobFactoryBeanConfiguration.class, ARGS); validateContext(); } private void validateContext() { TaskExplorer taskExplorer = this.applicationContext.getBean(TaskExplorer.class); Page page = taskExplorer.findTaskExecutionsByName("application", PageRequest.of(0, 1)); Set jobExecutionIds = taskExplorer .getJobExecutionIdsByTaskExecutionId(page.iterator().next().getExecutionId()); assertThat(jobExecutionIds.size()).isEqualTo(1); assertThat(taskExplorer.getTaskExecution(jobExecutionIds.iterator().next()).getExecutionId()).isEqualTo(1); } @Test public void testNoListenerIfTaskNotEnabled() { this.applicationContext = SpringApplication.run(TaskNotEnabledConfiguration.class, ARGS); assertThat(applicationContext.getBean(Job.class)).isNotNull(); assertThatThrownBy(() -> applicationContext.getBean(TaskBatchExecutionListenerBeanPostProcessor.class)) .isInstanceOf(NoSuchBeanDefinitionException.class); assertThatThrownBy(() -> applicationContext.getBean(TaskBatchExecutionListener.class)) .isInstanceOf(NoSuchBeanDefinitionException.class); } @Test public 
void testMultipleDataSources() { this.applicationContext = SpringApplication.run(JobConfigurationMultipleDataSources.class, ARGS); TaskExplorer taskExplorer = this.applicationContext.getBean(TaskExplorer.class); Page page = taskExplorer.findTaskExecutionsByName("application", PageRequest.of(0, 1)); Set jobExecutionIds = taskExplorer .getJobExecutionIdsByTaskExecutionId(page.iterator().next().getExecutionId()); assertThat(jobExecutionIds.size()).isEqualTo(1); assertThat(taskExplorer.getTaskExecution(jobExecutionIds.iterator().next()).getExecutionId()).isEqualTo(1); } @Test public void testAutobuiltDataSourceNoJob() { this.applicationContext = SpringApplication.run(NoJobConfiguration.class, ARGS); TaskExplorer taskExplorer = this.applicationContext.getBean(TaskExplorer.class); Page page = taskExplorer.findTaskExecutionsByName("application", PageRequest.of(0, 1)); Set jobExecutionIds = taskExplorer .getJobExecutionIdsByTaskExecutionId(page.iterator().next().getExecutionId()); assertThat(jobExecutionIds.size()).isEqualTo(0); } @Test public void testMapBased() { this.applicationContext = SpringApplication.run(JobConfiguration.class, ARGS); TaskExplorer taskExplorer = this.applicationContext.getBean(TaskExplorer.class); Page page = taskExplorer.findTaskExecutionsByName("application", PageRequest.of(0, 1)); Set jobExecutionIds = taskExplorer .getJobExecutionIdsByTaskExecutionId(page.iterator().next().getExecutionId()); assertThat(jobExecutionIds.size()).isEqualTo(1); assertThat((long) taskExplorer.getTaskExecutionIdByJobExecutionId(jobExecutionIds.iterator().next())) .isEqualTo(1); } @Test public void testMultipleJobs() { this.applicationContext = SpringApplication.run(MultipleJobConfiguration.class, "--spring.batch.job.name=job1"); TaskExplorer taskExplorer = this.applicationContext.getBean(TaskExplorer.class); Page page = taskExplorer.findTaskExecutionsByName("application", PageRequest.of(0, 1)); Set jobExecutionIds = taskExplorer 
.getJobExecutionIdsByTaskExecutionId(page.iterator().next().getExecutionId()); assertThat(jobExecutionIds.size()).isEqualTo(1); Iterator jobExecutionIdsIterator = jobExecutionIds.iterator(); assertThat((long) taskExplorer.getTaskExecutionIdByJobExecutionId(jobExecutionIdsIterator.next())).isEqualTo(1); } @Test public void testBatchExecutionListenerBeanPostProcessorWithJobNames() { List jobNames = new ArrayList<>(3); jobNames.add("job1"); jobNames.add("job2"); jobNames.add("TESTOBJECT"); TaskBatchExecutionListenerBeanPostProcessor beanPostProcessor = beanPostProcessor(jobNames); SimpleJob testObject = new SimpleJob(); SimpleJob bean = (SimpleJob) beanPostProcessor.postProcessBeforeInitialization(testObject, "TESTOBJECT"); assertThat(bean).isEqualTo(testObject); } @Test public void testBatchExecutionListenerBeanPostProcessorWithEmptyJobNames() { TaskBatchExecutionListenerBeanPostProcessor beanPostProcessor = beanPostProcessor(Collections.emptyList()); SimpleJob testObject = new SimpleJob(); SimpleJob bean = (SimpleJob) beanPostProcessor.postProcessBeforeInitialization(testObject, "TESTOBJECT"); assertThat(bean).isEqualTo(testObject); } @Test public void testBatchExecutionListenerBeanPostProcessorNullJobNames() { assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> { beanPostProcessor(null); }); } private TaskBatchExecutionListenerBeanPostProcessor beanPostProcessor(List jobNames) { this.applicationContext = SpringApplication.run(new Class[] { JobConfiguration.class, PropertyPlaceholderAutoConfiguration.class, EmbeddedDataSourceConfiguration.class, BatchAutoConfiguration.class, TaskBatchAutoConfiguration.class, SimpleTaskAutoConfiguration.class, SingleTaskConfiguration.class }, ARGS); TaskBatchExecutionListenerBeanPostProcessor beanPostProcessor = this.applicationContext .getBean(TaskBatchExecutionListenerBeanPostProcessor.class); beanPostProcessor.setJobNames(jobNames); return beanPostProcessor; } @EnableBatchProcessing @TaskBatchTest 
@Import(EmbeddedDataSourceConfiguration.class) @EnableTask public static class NoJobConfiguration { @Bean PlatformTransactionManager transactionManager() { return new ResourcelessTransactionManager(); } } @TaskBatchTest @Import(EmbeddedDataSourceConfiguration.class) @EnableTask public static class JobConfiguration { @Bean public Job job(JobRepository jobRepository, PlatformTransactionManager transactionManager) { return new JobBuilder("job", jobRepository) .start(new StepBuilder("step1", jobRepository).tasklet((contribution, chunkContext) -> { System.out.println("Executed"); return RepeatStatus.FINISHED; }, transactionManager).build()) .build(); } @Bean PlatformTransactionManager transactionManager() { return new ResourcelessTransactionManager(); } } @EnableBatchProcessing @TaskBatchTest @Import(EmbeddedDataSourceConfiguration.class) public static class TaskNotEnabledConfiguration { @Bean public Job job(JobRepository jobRepository, PlatformTransactionManager transactionManager) { return new JobBuilder("job", jobRepository) .start(new StepBuilder("step1", jobRepository).tasklet((contribution, chunkContext) -> { System.out.println("Executed"); return RepeatStatus.FINISHED; }, transactionManager).build()) .build(); } @Bean PlatformTransactionManager transactionManager() { return new ResourcelessTransactionManager(); } } @TaskBatchTest @EnableTask @Import(EmbeddedDataSourceConfiguration.class) public static class JobFactoryBeanConfiguration { @Bean public FactoryBean job(JobRepository jobRepository, PlatformTransactionManager transactionManager) { return new FactoryBean() { @Override public Job getObject() { return new JobBuilder("job", jobRepository) .start(new StepBuilder("step1", jobRepository).tasklet((contribution, chunkContext) -> { System.out.println("Executed"); return RepeatStatus.FINISHED; }, transactionManager).build()) .build(); } @Override public Class getObjectType() { return Job.class; } @Override public boolean isSingleton() { return true; } }; } @Bean 
PlatformTransactionManager transactionManager() { return new ResourcelessTransactionManager(); } } @TaskBatchTest @EnableTask @Import(EmbeddedDataSourceConfiguration.class) public static class JobConfigurationMultipleDataSources { @Bean public Job job(JobRepository jobRepository) { return new JobBuilder("job", jobRepository) .start(new StepBuilder("step1", jobRepository).tasklet(new Tasklet() { @Override public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { System.out.println("Executed"); return RepeatStatus.FINISHED; } }, new ResourcelessTransactionManager()).build()) .build(); } @Bean @Primary public DataSource myDataSource() { EmbeddedDatabaseBuilder builder = new EmbeddedDatabaseBuilder().setType(EmbeddedDatabaseType.H2) .setName("myDataSource"); return builder.build(); } @Bean public DataSource incorrectDataSource() { EmbeddedDatabaseBuilder builder = new EmbeddedDatabaseBuilder().setType(EmbeddedDatabaseType.H2) .setName("incorrectDataSource"); return builder.build(); } @Bean public TaskConfigurer taskConfigurer() { return new DefaultTaskConfigurer(myDataSource()); } @Bean PlatformTransactionManager transactionManager() { return new ResourcelessTransactionManager(); } } @TaskBatchTest @EnableTask @Import(EmbeddedDataSourceConfiguration.class) public static class MultipleJobConfiguration { @Bean public Job job1(JobRepository jobRepository, PlatformTransactionManager transactionManager) { return new JobBuilder("job1", jobRepository) .start(new StepBuilder("job1step1", jobRepository).tasklet((contribution, chunkContext) -> { System.out.println("Executed job1"); return RepeatStatus.FINISHED; }, transactionManager).build()) .build(); } @Bean public Job job2(JobRepository jobRepository, PlatformTransactionManager transactionManager) { return new JobBuilder("job2", jobRepository) .start(new StepBuilder("job2step1", jobRepository).tasklet((contribution, chunkContext) -> { System.out.println("Executed job2"); return 
RepeatStatus.FINISHED; }, transactionManager).build()) .build(); } @Bean PlatformTransactionManager transactionManager() { return new ResourcelessTransactionManager(); } } } ================================================ FILE: spring-cloud-task-batch/src/test/resources/META-INF/spring/org.springframework.cloud.task.batch.configuration.TaskBatchTest.imports ================================================ org.springframework.cloud.task.batch.configuration.TaskBatchAutoConfiguration org.springframework.boot.autoconfigure.context.PropertyPlaceholderAutoConfiguration org.springframework.boot.batch.autoconfigure.BatchAutoConfiguration org.springframework.cloud.task.configuration.SimpleTaskAutoConfiguration org.springframework.cloud.task.configuration.SingleTaskConfiguration ================================================ FILE: spring-cloud-task-batch/src/test/resources/schema-h2.sql ================================================ CREATE TABLE FOO_EXECUTION ( TASK_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY , START_TIME TIMESTAMP DEFAULT NULL , END_TIME TIMESTAMP DEFAULT NULL , TASK_NAME VARCHAR(100) , EXIT_CODE INTEGER , EXIT_MESSAGE VARCHAR(2500) , ERROR_MESSAGE VARCHAR(2500) , LAST_UPDATED TIMESTAMP, EXTERNAL_EXECUTION_ID VARCHAR(255), PARENT_EXECUTION_ID BIGINT ); CREATE TABLE FOO_EXECUTION_PARAMS ( TASK_EXECUTION_ID BIGINT NOT NULL , TASK_PARAM VARCHAR(2500) , constraint TASK_EXEC_PARAMS_FK foreign key (TASK_EXECUTION_ID) references FOO_EXECUTION(TASK_EXECUTION_ID) ) ; CREATE TABLE FOO_TASK_BATCH ( TASK_EXECUTION_ID BIGINT NOT NULL , JOB_EXECUTION_ID BIGINT NOT NULL , constraint TASK_EXEC_BATCH_FK foreign key (TASK_EXECUTION_ID) references FOO_EXECUTION(TASK_EXECUTION_ID) ) ; CREATE SEQUENCE FOO_SEQ ; CREATE TABLE FOO_LOCK ( LOCK_KEY CHAR(36) NOT NULL, REGION VARCHAR(100) NOT NULL, CLIENT_ID CHAR(36), CREATED_DATE TIMESTAMP NOT NULL, constraint LOCK_PK primary key (LOCK_KEY, REGION) ); ================================================ FILE: 
spring-cloud-task-batch/src/test/resources/schema-with-primary-keys-h2.sql ================================================ CREATE TABLE TASK_EXECUTION ( TASK_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY , START_TIME TIMESTAMP DEFAULT NULL , END_TIME TIMESTAMP DEFAULT NULL , TASK_NAME VARCHAR(100) , EXIT_CODE INTEGER , EXIT_MESSAGE VARCHAR(2500) , ERROR_MESSAGE VARCHAR(2500) , LAST_UPDATED TIMESTAMP, EXTERNAL_EXECUTION_ID VARCHAR(255), PARENT_EXECUTION_ID BIGINT ); CREATE TABLE TASK_EXECUTION_PARAMS ( ID BIGINT GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, TASK_EXECUTION_ID BIGINT NOT NULL , TASK_PARAM VARCHAR(2500) , constraint TASK_EXEC_PARAMS_FK foreign key (TASK_EXECUTION_ID) references TASK_EXECUTION(TASK_EXECUTION_ID) ) ; CREATE TABLE TASK_TASK_BATCH ( ID BIGINT GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, TASK_EXECUTION_ID BIGINT NOT NULL , JOB_EXECUTION_ID BIGINT NOT NULL , constraint TASK_EXEC_BATCH_FK foreign key (TASK_EXECUTION_ID) references TASK_EXECUTION(TASK_EXECUTION_ID) ) ; CREATE SEQUENCE TASK_SEQ ; CREATE TABLE TASK_LOCK ( LOCK_KEY CHAR(36) NOT NULL, REGION VARCHAR(100) NOT NULL, CLIENT_ID CHAR(36), CREATED_DATE TIMESTAMP NOT NULL, constraint LOCK_PK primary key (LOCK_KEY, REGION) ); ================================================ FILE: spring-cloud-task-core/pom.xml ================================================ 4.0.0 org.springframework.cloud spring-cloud-task-parent 5.0.2-SNAPSHOT spring-cloud-task-core jar Spring Cloud Task Core Spring Cloud Task org.springframework.boot spring-boot-autoconfigure org.springframework.boot spring-boot-sql org.springframework spring-jdbc ch.qos.logback logback-classic test org.springframework.boot spring-boot-jdbc test org.springframework.boot spring-boot-micrometer-metrics test org.springframework.boot spring-boot-micrometer-tracing test org.springframework.boot spring-boot-zipkin test org.springframework.boot spring-boot-starter-test test com.h2database h2 test org.apache.tomcat tomcat-jdbc test 
org.springframework.batch spring-batch-infrastructure org.springframework.data spring-data-commons org.springframework.integration spring-integration-core true org.springframework.integration spring-integration-jdbc true jakarta.platform jakarta.jakartaee-api ${jakarta-ee-api.version} true provided org.springframework spring-orm true org.hibernate.validator hibernate-validator test org.springframework.boot spring-boot-configuration-processor true org.junit.jupiter junit-jupiter-api test org.springframework.boot spring-boot-autoconfigure-processor true org.springframework.boot spring-boot-actuator-autoconfigure true io.micrometer micrometer-core io.micrometer micrometer-observation io.micrometer micrometer-test test io.micrometer micrometer-observation-test test org.junit.jupiter junit-jupiter-params test org.junit.jupiter junit-jupiter-engine test io.micrometer micrometer-tracing-test test io.micrometer micrometer-tracing-bridge-brave test io.zipkin.brave brave-tests test org.springframework.boot spring-boot-micrometer-tracing-brave test io.zipkin.reporter2 zipkin-reporter-brave test io.zipkin.reporter2 zipkin-sender-urlconnection test org.testcontainers testcontainers test org.testcontainers testcontainers-junit-jupiter test org.testcontainers testcontainers-mariadb test org.mariadb.jdbc mariadb-java-client test org.springframework.boot spring-boot-micrometer-tracing-test test ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/configuration/DefaultTaskConfigurer.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.cloud.task.configuration;

import javax.sql.DataSource;

import jakarta.persistence.EntityManager;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager;
import org.springframework.cloud.task.repository.TaskExplorer;
import org.springframework.cloud.task.repository.TaskNameResolver;
import org.springframework.cloud.task.repository.TaskRepository;
import org.springframework.cloud.task.repository.dao.JdbcTaskExecutionDao;
import org.springframework.cloud.task.repository.dao.MapTaskExecutionDao;
import org.springframework.cloud.task.repository.support.SimpleTaskExplorer;
import org.springframework.cloud.task.repository.support.SimpleTaskNameResolver;
import org.springframework.cloud.task.repository.support.SimpleTaskRepository;
import org.springframework.cloud.task.repository.support.TaskExecutionDaoFactoryBean;
import org.springframework.context.ApplicationContext;
import org.springframework.jdbc.support.JdbcTransactionManager;
import org.springframework.orm.jpa.JpaTransactionManager;
import org.springframework.transaction.PlatformTransactionManager;

/**
 * Default implementation of the {@link TaskConfigurer} interface. If no
 * {@link TaskConfigurer} implementation is present, then this configuration will be used.
 * The following defaults will be used:
 * <ul>
 * <li>{@link SimpleTaskRepository} is the default {@link TaskRepository} returned. If a
 * data source is present then the data will be stored in the database via
 * {@link JdbcTaskExecutionDao}, else it will be stored in a map via
 * {@link MapTaskExecutionDao}.</li>
 * </ul>
 *
 * @author Glenn Renfro
 * @author Michael Minella
 * @author Mahmoud Ben Hassine
 */
public class DefaultTaskConfigurer implements TaskConfigurer {

	private static final Log logger = LogFactory.getLog(DefaultTaskConfigurer.class);

	private TaskProperties taskProperties;

	private TaskRepository taskRepository;

	private TaskExplorer taskExplorer;

	// Resolved lazily by getTransactionManager(); stays null until first requested.
	private PlatformTransactionManager transactionManager;

	// May be null; repository/explorer then fall back to the in-memory map DAO.
	private DataSource dataSource;

	private ApplicationContext context;

	public DefaultTaskConfigurer() {
		this(TaskProperties.DEFAULT_TABLE_PREFIX);
	}

	/**
	 * Initializes the DefaultTaskConfigurer and retrieves table prefix from
	 * {@link TaskProperties}.
	 * @param taskProperties the task properties used to obtain the table prefix.
	 */
	public DefaultTaskConfigurer(TaskProperties taskProperties) {
		this(null, null, null, taskProperties);
	}

	/**
	 * Initializes the DefaultTaskConfigurer and sets the default table prefix to
	 * {@link TaskProperties#DEFAULT_TABLE_PREFIX}.
	 * @param dataSource references the {@link DataSource} to be used as the Task
	 * repository. If none is provided, a Map will be used (not recommended for production
	 * use).
	 */
	public DefaultTaskConfigurer(DataSource dataSource) {
		this(dataSource, TaskProperties.DEFAULT_TABLE_PREFIX, null);
	}

	/**
	 * Initializes the DefaultTaskConfigurer and retrieves table prefix from
	 * {@link TaskProperties}.
	 * @param dataSource references the {@link DataSource} to be used as the Task
	 * repository. If none is provided, a Map will be used (not recommended for production
	 * use).
	 * @param taskProperties the task properties used to obtain tablePrefix if not set by
	 * tablePrefix field.
	 */
	public DefaultTaskConfigurer(DataSource dataSource, TaskProperties taskProperties) {
		this(dataSource, null, null, taskProperties);
	}

	/**
	 * Initializes the DefaultTaskConfigurer.
	 * @param tablePrefix the prefix to apply to the task table names used by task
	 * infrastructure.
	 */
	public DefaultTaskConfigurer(String tablePrefix) {
		this(null, tablePrefix, null);
	}

	/**
	 * Initializes the DefaultTaskConfigurer.
	 * @param tablePrefix the prefix to apply to the task table names used by task
	 * infrastructure.
	 * @param taskProperties the task properties used to obtain tablePrefix if not set by
	 * tablePrefix field.
	 */
	public DefaultTaskConfigurer(String tablePrefix, TaskProperties taskProperties) {
		this(null, tablePrefix, null, taskProperties);
	}

	/**
	 * Initializes the DefaultTaskConfigurer.
	 * @param dataSource references the {@link DataSource} to be used as the Task
	 * repository. If none is provided, a Map will be used (not recommended for production
	 * use).
	 * @param tablePrefix the prefix to apply to the task table names used by task
	 * infrastructure.
	 * @param context the context to be used.
	 */
	public DefaultTaskConfigurer(DataSource dataSource, String tablePrefix, ApplicationContext context) {
		this(dataSource, tablePrefix, context, null);
	}

	/**
	 * Initializes the DefaultTaskConfigurer.
	 * @param dataSource references the {@link DataSource} to be used as the Task
	 * repository. If none is provided, a Map will be used (not recommended for production
	 * use).
	 * @param tablePrefix the prefix to apply to the task table names used by task
	 * infrastructure.
	 * @param context the context to be used.
	 * @param taskProperties the task properties used to obtain tablePrefix if not set by
	 * tablePrefix field.
	 */
	public DefaultTaskConfigurer(DataSource dataSource, String tablePrefix, ApplicationContext context,
			TaskProperties taskProperties) {
		this.dataSource = dataSource;
		this.context = context;
		TaskExecutionDaoFactoryBean taskExecutionDaoFactoryBean;
		this.taskProperties = taskProperties;
		// Precedence: explicit tablePrefix argument, then a non-empty prefix from
		// TaskProperties, then the framework default.
		if (tablePrefix == null) {
			tablePrefix = (taskProperties != null && !taskProperties.getTablePrefix().isEmpty())
					? taskProperties.getTablePrefix() : TaskProperties.DEFAULT_TABLE_PREFIX;
		}
		// With a DataSource the factory bean produces a JDBC-backed DAO; without one it
		// falls back to its no-arg (map-backed) mode.
		if (this.dataSource != null) {
			taskExecutionDaoFactoryBean = new TaskExecutionDaoFactoryBean(this.dataSource, tablePrefix);
		}
		else {
			taskExecutionDaoFactoryBean = new TaskExecutionDaoFactoryBean();
		}
		this.taskRepository = new SimpleTaskRepository(taskExecutionDaoFactoryBean);
		this.taskExplorer = new SimpleTaskExplorer(taskExecutionDaoFactoryBean);
	}

	@Override
	public TaskRepository getTaskRepository() {
		return this.taskRepository;
	}

	@Override
	public TaskExplorer getTaskExplorer() {
		return this.taskExplorer;
	}

	@Override
	public DataSource getTaskDataSource() {
		return this.dataSource;
	}

	@Override
	public TaskNameResolver getTaskNameResolver() {
		return new SimpleTaskNameResolver();
	}

	/**
	 * Lazily selects a {@link PlatformTransactionManager}: JPA when the jakarta
	 * persistence API is on the classpath and an {@link EntityManager} bean exists,
	 * otherwise JDBC when a {@link DataSource} is available, otherwise a resourceless
	 * manager. The result is cached after the first call.
	 */
	@Override
	public PlatformTransactionManager getTransactionManager() {
		if (this.transactionManager == null) {
			if (isDataSourceAvailable()) {
				try {
					// Probe the classpath first so contexts without JPA never touch
					// EntityManager.class below.
					Class.forName("jakarta.persistence.EntityManager");
					if (this.context != null && this.context.getBeanNamesForType(EntityManager.class).length > 0) {
						logger.debug("EntityManager was found, using JpaTransactionManager");
						this.transactionManager = new JpaTransactionManager();
					}
				}
				catch (ClassNotFoundException ignore) {
					// Expected when JPA is absent; fall through to the JDBC manager.
					logger.debug("No EntityManager was found, using DataSourceTransactionManager");
				}
				finally {
					// Covers both the ClassNotFound path and the "class present but no
					// EntityManager bean" path.
					if (this.transactionManager == null) {
						this.transactionManager = new JdbcTransactionManager(this.dataSource);
					}
				}
			}
			else {
				logger.debug("No DataSource was found, using ResourcelessTransactionManager");
				this.transactionManager = new ResourcelessTransactionManager();
			}
		}
		return this.transactionManager;
	}

	private boolean isDataSourceAvailable() {
		return this.dataSource != null;
	}

}

================================================
FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/configuration/EnableTask.java
================================================
/*
 * Copyright 2015-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.cloud.task.configuration;

import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Inherited;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

import org.springframework.context.annotation.Import;

/**
 * <p>
 * Enables the {@link org.springframework.cloud.task.listener.TaskLifecycleListener} so
 * that the features of Spring Cloud Task will be applied.
 *
 * <pre class="code">
 * &#064;Configuration
 * &#064;EnableTask
 * public class AppConfig {
 *
 * 	&#064;Bean
 * 	public MyCommandLineRunner myCommandLineRunner() {
 * 		return new MyCommandLineRunner()
 *    }
 * }
 * </pre>
 *
 * Note that only one of your configuration classes needs to have the
 * <code>&#064;EnableTask</code> annotation. Once you have an
 * <code>&#064;EnableTask</code> class in your configuration the task will have the Spring
 * Cloud Task features available.
 *
 * @author Glenn Renfro
 *
 */
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.RUNTIME)
@Documented
@Inherited
@Import(TaskLifecycleConfiguration.class)
public @interface EnableTask {

}

================================================
FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/configuration/NoTransactionManagerProperty.java
================================================
/*
 * Copyright 2022-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.cloud.task.configuration;

import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.autoconfigure.condition.NoneNestedConditions;

/**
 * A condition that verifies that the spring.cloud.task.transaction-manager property is
 * not being used. Because this extends {@link NoneNestedConditions}, the outer condition
 * matches only when the nested {@link OnProperty} condition does NOT match, i.e. when the
 * property is absent.
 *
 * @author Glenn Renfro
 * @since 3.0
 */
class NoTransactionManagerProperty extends NoneNestedConditions {

	NoTransactionManagerProperty() {
		// Evaluated at bean-registration time (after configuration classes are parsed).
		super(ConfigurationPhase.REGISTER_BEAN);
	}

	// Matches when spring.cloud.task.transaction-manager is set; the enclosing
	// NoneNestedConditions inverts this.
	@ConditionalOnProperty(prefix = "spring.cloud.task", name = "transaction-manager")
	static class OnProperty {

	}

}

================================================
FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/configuration/SimpleTaskAutoConfiguration.java
================================================
/*
 * Copyright 2015-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*/ package org.springframework.cloud.task.configuration; import java.util.Arrays; import java.util.Collection; import javax.sql.DataSource; import jakarta.annotation.PostConstruct; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.springframework.aop.scope.ScopedProxyUtils; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.autoconfigure.AutoConfiguration; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.boot.sql.init.dependency.DatabaseInitializationDependencyConfigurer; import org.springframework.cloud.task.repository.TaskExplorer; import org.springframework.cloud.task.repository.TaskNameResolver; import org.springframework.cloud.task.repository.TaskRepository; import org.springframework.cloud.task.repository.support.SimpleTaskRepository; import org.springframework.cloud.task.repository.support.TaskRepositoryInitializer; import org.springframework.context.ConfigurableApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Conditional; import org.springframework.context.annotation.Import; import org.springframework.context.annotation.Profile; import org.springframework.transaction.PlatformTransactionManager; import org.springframework.transaction.annotation.EnableTransactionManagement; import org.springframework.util.CollectionUtils; /** * Base {@code Configuration} class providing common structure for enabling and using * Spring Task. Customization is available by implementing the {@link TaskConfigurer} * interface. 
* * @author Glenn Renfro * @author Michael Minella * @author Mahmoud Ben Hassine */ @AutoConfiguration @EnableTransactionManagement @EnableConfigurationProperties({ TaskProperties.class }) // @checkstyle:off @ConditionalOnProperty(prefix = "spring.cloud.task.autoconfiguration", name = "enabled", havingValue = "true", matchIfMissing = true) // @checkstyle:on @Import(DatabaseInitializationDependencyConfigurer.class) public class SimpleTaskAutoConfiguration { protected static final Log logger = LogFactory.getLog(SimpleTaskAutoConfiguration.class); @Autowired(required = false) private Collection dataSources; @Autowired private ConfigurableApplicationContext context; @Autowired private TaskProperties taskProperties; private boolean initialized = false; private TaskRepository taskRepository; private PlatformTransactionManager platformTransactionManager; private TaskExplorer taskExplorer; private TaskNameResolver taskNameResolver; @Bean public SimpleTaskRepository taskRepository() { return (SimpleTaskRepository) this.taskRepository; } @Conditional(NoTransactionManagerProperty.class) @Bean public PlatformTransactionManager springCloudTaskTransactionManager() { return this.platformTransactionManager; } @Bean public TaskExplorer taskExplorer() { return this.taskExplorer; } @Bean public TaskNameResolver taskNameResolver() { return taskNameResolver; } @Bean public TaskRepositoryInitializer taskRepositoryInitializer() { TaskRepositoryInitializer taskRepositoryInitializer = new TaskRepositoryInitializer(this.taskProperties); DataSource initializerDataSource = getDefaultConfigurer().getTaskDataSource(); if (initializerDataSource != null) { taskRepositoryInitializer.setDataSource(initializerDataSource); } return taskRepositoryInitializer; } @Bean @Profile("cloud") TaskObservationCloudKeyValues taskObservationCloudKeyValues() { return new TaskObservationCloudKeyValues(); } /** * Determines the {@link TaskConfigurer} to use. 
*/ @PostConstruct protected void initialize() { if (this.initialized) { return; } TaskConfigurer taskConfigurer = getDefaultConfigurer(); logger.debug(String.format("Using %s TaskConfigurer", taskConfigurer.getClass().getName())); this.taskRepository = taskConfigurer.getTaskRepository(); this.platformTransactionManager = taskConfigurer.getTransactionManager(); this.taskExplorer = taskConfigurer.getTaskExplorer(); this.taskNameResolver = taskConfigurer.getTaskNameResolver(); this.initialized = true; } private TaskConfigurer getDefaultConfigurer() { verifyEnvironment(); int configurers = this.context.getBeanNamesForType(TaskConfigurer.class).length; if (configurers < 1) { TaskConfigurer taskConfigurer; if (!CollectionUtils.isEmpty(this.dataSources) && this.dataSources.size() == 1) { taskConfigurer = new DefaultTaskConfigurer(this.dataSources.iterator().next(), this.taskProperties.getTablePrefix(), this.context); } else { taskConfigurer = new DefaultTaskConfigurer(this.taskProperties.getTablePrefix()); } this.context.getBeanFactory().registerSingleton("taskConfigurer", taskConfigurer); return taskConfigurer; } else { if (configurers == 1) { return this.context.getBean(TaskConfigurer.class); } else { throw new IllegalStateException("Expected one TaskConfigurer but found " + configurers); } } } private void verifyEnvironment() { int configurers = this.context.getBeanNamesForType(TaskConfigurer.class).length; // retrieve the count of dataSources (without instantiating them) excluding // DataSource proxy beans long dataSources = Arrays.stream(this.context.getBeanNamesForType(DataSource.class)) .filter((name -> !ScopedProxyUtils.isScopedTarget(name))) .count(); if (configurers == 0 && dataSources > 1) { throw new IllegalStateException("To use the default TaskConfigurer the context must contain no more than" + " one DataSource, found " + dataSources); } } } ================================================ FILE: 
spring-cloud-task-core/src/main/java/org/springframework/cloud/task/configuration/SingleInstanceTaskListener.java ================================================ /* * Copyright 2018-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.configuration; import java.time.Duration; import javax.sql.DataSource; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.springframework.cloud.task.listener.TaskExecutionException; import org.springframework.cloud.task.listener.annotation.AfterTask; import org.springframework.cloud.task.listener.annotation.BeforeTask; import org.springframework.cloud.task.listener.annotation.FailedTask; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.cloud.task.repository.TaskNameResolver; import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationEvent; import org.springframework.context.ApplicationEventPublisher; import org.springframework.context.ApplicationListener; import org.springframework.integration.jdbc.lock.DefaultLockRepository; import org.springframework.integration.jdbc.lock.JdbcLockRegistry; import org.springframework.integration.leader.DefaultCandidate; import org.springframework.integration.leader.event.OnFailedToAcquireMutexEvent; import org.springframework.integration.leader.event.OnGrantedEvent; import 
org.springframework.integration.support.leader.LockRegistryLeaderInitiator; import org.springframework.integration.support.locks.LockRegistry; import org.springframework.transaction.PlatformTransactionManager; /** * When spring.cloud.task.single-instance-enabled is set to true this listener will create * a lock for the task based on the spring.cloud.task.name. If a lock already exists this * Listener will throw a TaskExecutionException. If this listener is added manually, then * it should be added as the first listener in the chain. * * @author Glenn Renfro * @author Mahmoud Ben Hassine * @since 2.0.0 */ public class SingleInstanceTaskListener implements ApplicationListener { private static final Log logger = LogFactory.getLog(SingleInstanceTaskListener.class); private LockRegistry lockRegistry; private LockRegistryLeaderInitiator lockRegistryLeaderInitiator; private TaskNameResolver taskNameResolver; private ApplicationEventPublisher applicationEventPublisher; private boolean lockReady; private boolean lockFailed; private DataSource dataSource; private TaskProperties taskProperties; private ApplicationContext applicationContext; private PlatformTransactionManager platformTransactionManager; public SingleInstanceTaskListener(LockRegistry lockRegistry, TaskNameResolver taskNameResolver, TaskProperties taskProperties, ApplicationEventPublisher applicationEventPublisher, ApplicationContext applicationContext) { this.lockRegistry = lockRegistry; this.taskNameResolver = taskNameResolver; this.taskProperties = taskProperties; this.lockRegistryLeaderInitiator = new LockRegistryLeaderInitiator(this.lockRegistry); this.applicationEventPublisher = applicationEventPublisher; this.applicationContext = applicationContext; } public SingleInstanceTaskListener(DataSource dataSource, TaskNameResolver taskNameResolver, TaskProperties taskProperties, ApplicationEventPublisher applicationEventPublisher, ApplicationContext applicationContext) { this.taskNameResolver = taskNameResolver; 
this.applicationEventPublisher = applicationEventPublisher; this.dataSource = dataSource; this.taskProperties = taskProperties; this.applicationContext = applicationContext; this.platformTransactionManager = this.applicationContext.getBean("springCloudTaskTransactionManager", PlatformTransactionManager.class); } @BeforeTask public void lockTask(TaskExecution taskExecution) { if (this.lockRegistry == null) { this.lockRegistry = getDefaultLockRegistry(taskExecution.getExecutionId()); } this.lockRegistryLeaderInitiator = new LockRegistryLeaderInitiator(this.lockRegistry, new DefaultCandidate( String.valueOf(taskExecution.getExecutionId()), this.taskNameResolver.getTaskName())); this.lockRegistryLeaderInitiator.setApplicationEventPublisher(this.applicationEventPublisher); this.lockRegistryLeaderInitiator.setPublishFailedEvents(true); this.lockRegistryLeaderInitiator.start(); while (!this.lockReady) { try { Thread.sleep(this.taskProperties.getSingleInstanceLockCheckInterval()); } catch (InterruptedException ex) { logger.warn("Thread Sleep Failed", ex); } if (this.lockFailed) { String errorMessage = String.format("Task with name \"%s\" is already running.", this.taskNameResolver.getTaskName()); try { this.lockRegistryLeaderInitiator.destroy(); } catch (Exception exception) { throw new TaskExecutionException("Failed to destroy lock.", exception); } throw new TaskExecutionException(errorMessage); } } } @AfterTask public void unlockTaskOnEnd(TaskExecution taskExecution) throws Exception { this.lockRegistryLeaderInitiator.destroy(); } @FailedTask public void unlockTaskOnError(TaskExecution taskExecution, Throwable throwable) throws Exception { this.lockRegistryLeaderInitiator.destroy(); } @Override public void onApplicationEvent(ApplicationEvent applicationEvent) { if (applicationEvent instanceof OnGrantedEvent) { this.lockReady = true; } else if (applicationEvent instanceof OnFailedToAcquireMutexEvent) { this.lockFailed = true; } } private LockRegistry 
getDefaultLockRegistry(long executionId) { DefaultLockRepository lockRepository = new DefaultLockRepository(this.dataSource, String.valueOf(executionId)); lockRepository.setPrefix(this.taskProperties.getTablePrefix()); lockRepository.setApplicationContext(this.applicationContext); lockRepository.afterPropertiesSet(); lockRepository.setTransactionManager(this.platformTransactionManager); lockRepository.afterSingletonsInstantiated(); return new JdbcLockRegistry(lockRepository, Duration.ofSeconds(this.taskProperties.getSingleInstanceLockTtl())); } } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/configuration/SingleTaskConfiguration.java ================================================ /* * Copyright 2018-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.springframework.cloud.task.configuration; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.autoconfigure.AutoConfiguration; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.cloud.task.repository.TaskNameResolver; import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationEventPublisher; import org.springframework.context.annotation.Bean; import org.springframework.core.Ordered; import org.springframework.core.annotation.Order; import org.springframework.integration.support.locks.PassThruLockRegistry; /** * Autoconfiguration of {@link SingleInstanceTaskListener}. * * @author Glenn Renfro * @since 2.0.0 */ @Order(Ordered.HIGHEST_PRECEDENCE) @AutoConfiguration @ConditionalOnProperty(prefix = "spring.cloud.task", name = "single-instance-enabled", havingValue = "true") public class SingleTaskConfiguration { @Autowired private TaskProperties taskProperties; @Autowired private ApplicationEventPublisher applicationEventPublisher; @Autowired private TaskConfigurer taskConfigurer; @Bean public SingleInstanceTaskListener taskListener(TaskNameResolver resolver, ApplicationContext applicationContext) { if (this.taskConfigurer.getTaskDataSource() == null) { return new SingleInstanceTaskListener(new PassThruLockRegistry(), resolver, this.taskProperties, this.applicationEventPublisher, applicationContext); } return new SingleInstanceTaskListener(this.taskConfigurer.getTaskDataSource(), resolver, this.taskProperties, this.applicationEventPublisher, applicationContext); } } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/configuration/TaskConfigurer.java ================================================ /* * Copyright 2015-present the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.configuration; import javax.sql.DataSource; import org.springframework.cloud.task.repository.TaskExplorer; import org.springframework.cloud.task.repository.TaskNameResolver; import org.springframework.cloud.task.repository.TaskRepository; import org.springframework.transaction.PlatformTransactionManager; /** * Provides a strategy interface for providing configuration customization to the task * system. Users should not directly use getter methods from a TaskConfigurer * directly unless they are using it to supply the implementations for Spring Beans. * * @author Glenn Renfro */ public interface TaskConfigurer { /** * Create a {@link TaskRepository} for the Task. * @return A TaskRepository */ TaskRepository getTaskRepository(); /** * Create a {@link PlatformTransactionManager} for use with the * TaskRepository. * @return A PlatformTransactionManager */ PlatformTransactionManager getTransactionManager(); /** * Create a {@link TaskExplorer} for the task. * @return a TaskExplorer */ TaskExplorer getTaskExplorer(); /** * Retrieves the {@link DataSource} that will be used for task operations. If a * DataSource is not being used for the implemented TaskConfigurer this method will * return null. * @return {@link DataSource} that will be used for task operations. */ DataSource getTaskDataSource(); /** * Create a {@link TaskNameResolver} for use with the task application. 
* @return A TaskNameResolver */ TaskNameResolver getTaskNameResolver(); } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/configuration/TaskLifecycleConfiguration.java ================================================ /* * Copyright 2018-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.configuration; import io.micrometer.observation.ObservationRegistry; import jakarta.annotation.PostConstruct; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.springframework.beans.factory.ObjectProvider; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.ApplicationArguments; import org.springframework.cloud.task.listener.TaskLifecycleListener; import org.springframework.cloud.task.listener.TaskListenerExecutorObjectFactory; import org.springframework.cloud.task.repository.TaskExplorer; import org.springframework.cloud.task.repository.TaskNameResolver; import org.springframework.cloud.task.repository.TaskRepository; import org.springframework.context.ConfigurableApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; /** * Configuration for a {@link TaskLifecycleListener}. 
* * @author Glenn Renfro * @author Michael Minella * @since 2.1 */ @Configuration(proxyBeanMethods = false) public class TaskLifecycleConfiguration { protected static final Log logger = LogFactory.getLog(TaskLifecycleConfiguration.class); private TaskProperties taskProperties; private ConfigurableApplicationContext context; private ApplicationArguments applicationArguments; private TaskRepository taskRepository; private TaskExplorer taskExplorer; private TaskNameResolver taskNameResolver; private TaskLifecycleListener taskLifecycleListener; private boolean initialized = false; private ObservationRegistry observationRegistry; private TaskObservationCloudKeyValues taskObservationCloudKeyValues; @Autowired public TaskLifecycleConfiguration(TaskProperties taskProperties, ConfigurableApplicationContext context, TaskRepository taskRepository, TaskExplorer taskExplorer, TaskNameResolver taskNameResolver, ObjectProvider applicationArguments, @Autowired(required = false) ObservationRegistry observationRegistry, @Autowired(required = false) TaskObservationCloudKeyValues taskObservationCloudKeyValues) { this.taskProperties = taskProperties; this.context = context; this.taskRepository = taskRepository; this.taskExplorer = taskExplorer; this.taskNameResolver = taskNameResolver; this.applicationArguments = applicationArguments.getIfAvailable(); this.observationRegistry = observationRegistry == null ? ObservationRegistry.NOOP : observationRegistry; this.taskObservationCloudKeyValues = taskObservationCloudKeyValues; } @Bean public TaskLifecycleListener taskLifecycleListener() { return this.taskLifecycleListener; } /** * Initializes the {@link TaskLifecycleListener} for the task app. 
*/ @PostConstruct protected void initialize() { if (!this.initialized) { this.taskLifecycleListener = new TaskLifecycleListener(this.taskRepository, this.taskNameResolver, this.applicationArguments, this.taskExplorer, this.taskProperties, new TaskListenerExecutorObjectFactory(this.context), this.observationRegistry, taskObservationCloudKeyValues); this.initialized = true; } } } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/configuration/TaskObservationCloudKeyValues.java ================================================ /* * Copyright 2022-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.configuration; import org.springframework.beans.factory.annotation.Value; /** * Provides values for the {@link io.micrometer.common.KeyValues} for the task * {@link io.micrometer.observation.Observation} when the cloud profile is active. 
* * @author Glenn Renfro * @since 3.0 */ public class TaskObservationCloudKeyValues { @Value("${vcap.application.org_name:default}") private String organizationName; @Value("${vcap.application.space_id:unknown}") private String spaceId; @Value("${vcap.application.space_name:unknown}") private String spaceName; @Value("${vcap.application.application_name:unknown}") private String applicationName; @Value("${vcap.application.application_id:unknown}") private String applicationId; @Value("${vcap.application.application_version:unknown}") private String applicationVersion; @Value("${vcap.application.instance_index:0}") private String instanceIndex; public String getOrganizationName() { return organizationName; } public void setOrganizationName(String organizationName) { this.organizationName = organizationName; } public String getSpaceId() { return spaceId; } public void setSpaceId(String spaceId) { this.spaceId = spaceId; } public String getSpaceName() { return spaceName; } public void setSpaceName(String spaceName) { this.spaceName = spaceName; } public String getApplicationName() { return applicationName; } public void setApplicationName(String applicationName) { this.applicationName = applicationName; } public String getApplicationId() { return applicationId; } public void setApplicationId(String applicationId) { this.applicationId = applicationId; } public String getApplicationVersion() { return applicationVersion; } public void setApplicationVersion(String applicationVersion) { this.applicationVersion = applicationVersion; } public String getInstanceIndex() { return instanceIndex; } public void setInstanceIndex(String instanceIndex) { this.instanceIndex = instanceIndex; } } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/configuration/TaskProperties.java ================================================ /* * Copyright 2016-present the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.configuration; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.springframework.boot.context.properties.ConfigurationProperties; /** * Properties available to configure the task. * * @author Glenn Renfro * @author David Turanski */ @ConfigurationProperties(prefix = "spring.cloud.task") public class TaskProperties { /** * Default table prefix for Spring Cloud Task tables. */ public static final String DEFAULT_TABLE_PREFIX = "TASK_"; private static final int DEFAULT_CHECK_INTERVAL = 500; private static final Log logger = LogFactory.getLog(TaskProperties.class); /** * An id that can be associated with a task. */ private String externalExecutionId; /** * An id that will be used by the task when updating the task execution. */ private Long executionid; /** * The id of the parent task execution id that launched this task execution. Defaults * to null if task execution had no parent. */ private Long parentExecutionId; /** * The prefix to append to the table names created by Spring Cloud Task. */ private String tablePrefix = DEFAULT_TABLE_PREFIX; /** * When set to true the context is closed at the end of the task. Else the context * remains open. */ private Boolean closecontextEnabled = false; /** * When set to true it will check to see if a task execution with the same task name * is already running. 
If a task is still running then it will throw a * {@link org.springframework.cloud.task.listener.TaskExecutionException}. When task * execution ends the lock is released. */ private boolean singleInstanceEnabled = false; /** * Declares the maximum amount of time (in millis) that a task execution can hold a * lock to prevent another task from executing with a specific task name when the * single-instance-enabled is set to true. Default time is: Integer.MAX_VALUE. */ private int singleInstanceLockTtl = Integer.MAX_VALUE; /** * Declares the time (in millis) that a task execution will wait between checks. * Default time is: 500 millis. */ private int singleInstanceLockCheckInterval = DEFAULT_CHECK_INTERVAL; /** * If set to true then tables are initialized. If set to false tables are not * initialized. Defaults to null. The requirement for it to be defaulted to null is so * that we can support the spring.cloud.task.initialize.enable until it * is removed. */ private Boolean initializeEnabled; public String getExternalExecutionId() { return this.externalExecutionId; } public void setExternalExecutionId(String externalExecutionId) { this.externalExecutionId = externalExecutionId; } public Long getExecutionid() { return this.executionid; } public void setExecutionid(Long executionid) { this.executionid = executionid; } public Boolean getClosecontextEnabled() { return this.closecontextEnabled; } public void setClosecontextEnabled(Boolean closecontextEnabled) { this.closecontextEnabled = closecontextEnabled; } public String getTablePrefix() { return this.tablePrefix; } public void setTablePrefix(String tablePrefix) { this.tablePrefix = tablePrefix; } public Long getParentExecutionId() { return this.parentExecutionId; } public void setParentExecutionId(Long parentExecutionId) { this.parentExecutionId = parentExecutionId; } public boolean getSingleInstanceEnabled() { return this.singleInstanceEnabled; } public void setSingleInstanceEnabled(boolean singleInstanceEnabled) { 
this.singleInstanceEnabled = singleInstanceEnabled; } public int getSingleInstanceLockTtl() { return this.singleInstanceLockTtl; } public void setSingleInstanceLockTtl(int singleInstanceLockTtl) { this.singleInstanceLockTtl = singleInstanceLockTtl; } public int getSingleInstanceLockCheckInterval() { return this.singleInstanceLockCheckInterval; } public void setSingleInstanceLockCheckInterval(int singleInstanceLockCheckInterval) { this.singleInstanceLockCheckInterval = singleInstanceLockCheckInterval; } public Boolean isInitializeEnabled() { return initializeEnabled; } public void setInitializeEnabled(Boolean initializeEnabled) { this.initializeEnabled = initializeEnabled; } } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/configuration/TaskRepositoryDatabaseInitializerDetector.java ================================================ /* * Copyright 2022-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.configuration; import java.util.Set; import org.springframework.boot.sql.init.dependency.AbstractBeansOfTypeDatabaseInitializerDetector; import org.springframework.boot.sql.init.dependency.DatabaseInitializerDetector; import org.springframework.cloud.task.repository.support.TaskRepositoryInitializer; import org.springframework.core.Ordered; /** * {@link DatabaseInitializerDetector} for {@link TaskRepositoryInitializer}. 
* * @author Henning Pöttker */ class TaskRepositoryDatabaseInitializerDetector extends AbstractBeansOfTypeDatabaseInitializerDetector { private static final int PRECEDENCE = Ordered.LOWEST_PRECEDENCE - 99; @Override protected Set> getDatabaseInitializerBeanTypes() { return Set.of(TaskRepositoryInitializer.class); } @Override public int getOrder() { return PRECEDENCE; } } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/configuration/TaskRepositoryDependsOnDatabaseInitializationDetector.java ================================================ /* * Copyright 2022-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.configuration; import java.util.Set; import org.springframework.boot.sql.init.dependency.AbstractBeansOfTypeDependsOnDatabaseInitializationDetector; import org.springframework.boot.sql.init.dependency.DependsOnDatabaseInitializationDetector; import org.springframework.cloud.task.repository.TaskRepository; /** * {@link DependsOnDatabaseInitializationDetector} for {@link TaskRepository}. 
* * @author Henning Pöttker */ class TaskRepositoryDependsOnDatabaseInitializationDetector extends AbstractBeansOfTypeDependsOnDatabaseInitializationDetector { @Override protected Set> getDependsOnDatabaseInitializationBeanTypes() { return Set.of(TaskRepository.class); } } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/configuration/TaskRuntimeHints.java ================================================ /* * Copyright 2022-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.configuration; import java.sql.Statement; import org.springframework.aot.hint.MemberCategory; import org.springframework.aot.hint.RuntimeHints; import org.springframework.aot.hint.RuntimeHintsRegistrar; import org.springframework.aot.hint.TypeReference; import org.springframework.util.ClassUtils; /** * Native Hints for Spring Cloud Task. 
* * @author Glenn Renfro * @author Mahmoud Ben Hassine * @since 3.0 */ public class TaskRuntimeHints implements RuntimeHintsRegistrar { @Override public void registerHints(RuntimeHints hints, ClassLoader classLoader) { hints.reflection().registerType(TypeReference.of("java.sql.DatabaseMetaData"), hint -> { }); hints.resources().registerPattern("org/springframework/cloud/task/schema-db2.sql"); hints.resources().registerPattern("org/springframework/cloud/task/schema-h2.sql"); hints.resources().registerPattern("org/springframework/cloud/task/schema-mysql.sql"); hints.resources().registerPattern("org/springframework/cloud/task/schema-mariadb.sql"); hints.resources().registerPattern("org/springframework/cloud/task/schema-oracle.sql"); hints.resources().registerPattern("org/springframework/cloud/task/schema-postgresql.sql"); hints.resources().registerPattern("org/springframework/cloud/task/schema-hsqldb.sql"); hints.resources().registerPattern("org/springframework/cloud/task/schema-sqlserver.sql"); hints.reflection() .registerType(TypeReference.of("org.springframework.boot.jdbc.init.DataSourceScriptDatabaseInitializer"), hint -> hint.withMembers(MemberCategory.INVOKE_DECLARED_CONSTRUCTORS, MemberCategory.INVOKE_DECLARED_METHODS)); hints.reflection() .registerType(TypeReference.of("org.springframework.cloud.task.repository.TaskExecution"), hint -> hint .withMembers(MemberCategory.INVOKE_DECLARED_CONSTRUCTORS, MemberCategory.INVOKE_DECLARED_METHODS)); if (!ClassUtils.isPresent("com.zaxxer.hikari.HikariDataSource", classLoader)) { return; } hints.reflection().registerType(Statement[].class, hint -> { }); hints.reflection() .registerType(TypeReference.of("com.zaxxer.hikari.util.ConcurrentBag$IConcurrentBagEntry[]"), hint -> { }); } } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/configuration/observation/DefaultTaskObservationConvention.java ================================================ /* * 
Copyright 2013-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.configuration.observation; import io.micrometer.common.KeyValues; import io.micrometer.observation.ObservationConvention; /** * {@link ObservationConvention} for Spring Cloud Task. * * @author Marcin Grzejszczak * @since 3.0.0 */ public class DefaultTaskObservationConvention implements TaskObservationConvention { @Override public KeyValues getLowCardinalityKeyValues(TaskObservationContext context) { return KeyValues.of(TaskDocumentedObservation.TaskRunnerTags.BEAN_NAME.withValue(context.getBeanName())); } @Override public String getName() { return "spring.cloud.task.runner"; } } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/configuration/observation/ObservationApplicationRunner.java ================================================ /* * Copyright 2018-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.configuration.observation; import io.micrometer.observation.Observation; import io.micrometer.observation.ObservationRegistry; import org.springframework.beans.factory.BeanFactory; import org.springframework.boot.ApplicationArguments; import org.springframework.boot.ApplicationRunner; /** * Observed representation of a {@link ApplicationRunner}. * * @author Marcin Grzejszczak */ class ObservationApplicationRunner implements ApplicationRunner { private static final DefaultTaskObservationConvention INSTANCE = new DefaultTaskObservationConvention(); private final BeanFactory beanFactory; private final ApplicationRunner delegate; private final String beanName; private ObservationRegistry registry; private TaskObservationConvention taskObservationConvention; ObservationApplicationRunner(BeanFactory beanFactory, ApplicationRunner delegate, String beanName) { this.beanFactory = beanFactory; this.delegate = delegate; this.beanName = beanName; } @Override public void run(ApplicationArguments args) throws Exception { TaskObservationContext context = new TaskObservationContext(this.beanName); Observation observation = TaskDocumentedObservation.TASK_RUNNER_OBSERVATION .observation(this.taskObservationConvention, INSTANCE, context, registry()) .contextualName(this.beanName); try (Observation.Scope scope = observation.start().openScope()) { this.delegate.run(args); } catch (Exception error) { observation.error(error); throw error; } finally { observation.stop(); } } private ObservationRegistry registry() { if (this.registry == null) { this.registry = this.beanFactory.getBean(ObservationRegistry.class); } return this.registry; } } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/configuration/observation/ObservationApplicationRunnerBeanPostProcessor.java 
================================================ /* * Copyright 2013-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.configuration.observation; import org.springframework.beans.BeansException; import org.springframework.beans.factory.BeanFactory; import org.springframework.beans.factory.config.BeanPostProcessor; import org.springframework.boot.ApplicationRunner; /** * Registers beans related to task scheduling. * * @author Marcin Grzejszczak */ class ObservationApplicationRunnerBeanPostProcessor implements BeanPostProcessor { private final BeanFactory beanFactory; ObservationApplicationRunnerBeanPostProcessor(BeanFactory beanFactory) { this.beanFactory = beanFactory; } @Override public Object postProcessAfterInitialization(Object bean, String beanName) throws BeansException { if (bean instanceof ApplicationRunner applicationRunner && !(bean instanceof ObservationApplicationRunner)) { return new ObservationApplicationRunner(this.beanFactory, applicationRunner, beanName); } return bean; } } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/configuration/observation/ObservationCommandLineRunner.java ================================================ /* * Copyright 2018-present the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.configuration.observation; import io.micrometer.observation.Observation; import io.micrometer.observation.ObservationRegistry; import org.springframework.beans.factory.BeanFactory; import org.springframework.boot.CommandLineRunner; /** * Observed representation of a {@link CommandLineRunner}. * * @author Marcin Grzejszczak */ class ObservationCommandLineRunner implements CommandLineRunner { private static final DefaultTaskObservationConvention INSTANCE = new DefaultTaskObservationConvention(); private final BeanFactory beanFactory; private final CommandLineRunner delegate; private final String beanName; private ObservationRegistry registry; private TaskObservationConvention taskObservationConvention; ObservationCommandLineRunner(BeanFactory beanFactory, CommandLineRunner delegate, String beanName) { this.beanFactory = beanFactory; this.delegate = delegate; this.beanName = beanName; } @Override public void run(String... 
args) throws Exception { TaskObservationContext context = new TaskObservationContext(this.beanName); Observation observation = TaskDocumentedObservation.TASK_RUNNER_OBSERVATION .observation(this.taskObservationConvention, INSTANCE, context, registry()) .contextualName(this.beanName); try (Observation.Scope scope = observation.start().openScope()) { this.delegate.run(args); } catch (Exception error) { observation.error(error); throw error; } finally { observation.stop(); } } private ObservationRegistry registry() { if (this.registry == null) { this.registry = this.beanFactory.getBean(ObservationRegistry.class); } return this.registry; } } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/configuration/observation/ObservationCommandLineRunnerBeanPostProcessor.java ================================================ /* * Copyright 2013-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.configuration.observation; import org.springframework.beans.BeansException; import org.springframework.beans.factory.BeanFactory; import org.springframework.beans.factory.config.BeanPostProcessor; import org.springframework.boot.CommandLineRunner; /** * Registers beans related to task scheduling. 
* * @author Marcin Grzejszczak */ class ObservationCommandLineRunnerBeanPostProcessor implements BeanPostProcessor { private final BeanFactory beanFactory; ObservationCommandLineRunnerBeanPostProcessor(BeanFactory beanFactory) { this.beanFactory = beanFactory; } @Override public Object postProcessAfterInitialization(Object bean, String beanName) throws BeansException { if (bean instanceof CommandLineRunner commandLineRunner && !(bean instanceof ObservationCommandLineRunner)) { return new ObservationCommandLineRunner(this.beanFactory, commandLineRunner, beanName); } return bean; } } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/configuration/observation/ObservationTaskAutoConfiguration.java ================================================ /* * Copyright 2013-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.springframework.cloud.task.configuration.observation; import io.micrometer.observation.ObservationRegistry; import org.springframework.beans.factory.BeanFactory; import org.springframework.boot.autoconfigure.AutoConfiguration; import org.springframework.boot.autoconfigure.condition.ConditionalOnBean; import org.springframework.boot.autoconfigure.condition.ConditionalOnClass; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.context.annotation.Bean; /** * {@link org.springframework.boot.autoconfigure.EnableAutoConfiguration * Auto-configuration} that registers instrumentation for Spring Cloud Task. * * @author Marcin Grzejszczak * @since 3.0.0 */ @AutoConfiguration @ConditionalOnClass(ObservationRegistry.class) @ConditionalOnProperty(value = "spring.cloud.task.observation.enabled", matchIfMissing = true) @ConditionalOnBean(ObservationRegistry.class) public class ObservationTaskAutoConfiguration { @Bean static ObservationCommandLineRunnerBeanPostProcessor observedCommandLineRunnerBeanPostProcessor( BeanFactory beanFactory) { return new ObservationCommandLineRunnerBeanPostProcessor(beanFactory); } @Bean static ObservationApplicationRunnerBeanPostProcessor observedApplicationRunnerBeanPostProcessor( BeanFactory beanFactory) { return new ObservationApplicationRunnerBeanPostProcessor(beanFactory); } } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/configuration/observation/TaskDocumentedObservation.java ================================================ /* * Copyright 2013-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.configuration.observation; import io.micrometer.common.docs.KeyName; import io.micrometer.observation.Observation; import io.micrometer.observation.ObservationConvention; import io.micrometer.observation.docs.ObservationDocumentation; enum TaskDocumentedObservation implements ObservationDocumentation { /** * Observation created when a task runner is executed. */ TASK_RUNNER_OBSERVATION { @Override public Class> getDefaultConvention() { return DefaultTaskObservationConvention.class; } @Override public KeyName[] getLowCardinalityKeyNames() { return TaskRunnerTags.values(); } @Override public String getPrefix() { return "spring.cloud.task"; } }; /** * Key names for Spring Cloud Task Command / Application runners. */ enum TaskRunnerTags implements KeyName { /** * Name of the bean that was executed by Spring Cloud Task. */ BEAN_NAME { @Override public String asString() { return "spring.cloud.task.runner.bean-name"; } } } } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/configuration/observation/TaskObservationContext.java ================================================ /* * Copyright 2013-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.configuration.observation; import java.util.function.Supplier; import io.micrometer.observation.Observation; /** * {@link Observation.Context} for Spring Cloud Task. * * @author Marcin Grzejszczak * @since 3.0.0 */ public class TaskObservationContext extends Observation.Context implements Supplier { private final String beanName; public TaskObservationContext(String beanName) { this.beanName = beanName; } public String getBeanName() { return beanName; } @Override public TaskObservationContext get() { return this; } } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/configuration/observation/TaskObservationConvention.java ================================================ /* * Copyright 2013-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.springframework.cloud.task.configuration.observation; import io.micrometer.observation.Observation; import io.micrometer.observation.ObservationConvention; /** * {@link ObservationConvention} for Spring Cloud Task. * * @author Marcin Grzejszczak * @since 3.0.0 */ public interface TaskObservationConvention extends ObservationConvention { @Override default boolean supportsContext(Observation.Context context) { return context instanceof TaskObservationContext; } } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/configuration/observation/package-info.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Observation support for Spring Cloud Task configuration. */ package org.springframework.cloud.task.configuration.observation; ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/configuration/package-info.java ================================================ /* * Copyright 2016-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Interfaces for configuring Spring Cloud Task and a default implementations. */ package org.springframework.cloud.task.configuration; ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/listener/DefaultTaskExecutionObservationConvention.java ================================================ /* * Copyright 2022-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.listener; import io.micrometer.common.KeyValues; import org.springframework.cloud.task.repository.TaskExecution; /** * /** Default {@link TaskExecutionObservationConvention} implementation. 
* * @author Glenn Renfro * @since 3.0.0 */ public class DefaultTaskExecutionObservationConvention implements TaskExecutionObservationConvention { @Override public KeyValues getLowCardinalityKeyValues(TaskExecutionObservationContext context) { return getKeyValuesForTaskExecution(context); } @Override public KeyValues getHighCardinalityKeyValues(TaskExecutionObservationContext context) { return KeyValues.empty(); } private KeyValues getKeyValuesForTaskExecution(TaskExecutionObservationContext context) { TaskExecution execution = context.getTaskExecution(); return KeyValues.of(TaskExecutionObservation.TaskKeyValues.TASK_STATUS.asString(), context.getStatus(), TaskExecutionObservation.TaskKeyValues.TASK_EXIT_CODE.asString(), String.valueOf(execution.getExitCode()), TaskExecutionObservation.TaskKeyValues.TASK_EXECUTION_ID.asString(), String.valueOf(execution.getExecutionId())); } @Override public String getName() { return "spring.cloud.task"; } } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/listener/TaskException.java ================================================ /* * Copyright 2016-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.listener; /** * Base Exception for any Task issues. 
* * @author Glenn Renfro */ public class TaskException extends RuntimeException { public TaskException(String message, Throwable e) { super(message, e); } public TaskException(String message) { super(message); } } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/listener/TaskExecutionException.java ================================================ /* * Copyright 2016-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.listener; /** * Is thrown when executing a task. * * @author Glenn Renfro. */ public class TaskExecutionException extends TaskException { public TaskExecutionException(String message) { super(message); } public TaskExecutionException(String message, Throwable throwable) { super(message, throwable); } } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/listener/TaskExecutionListener.java ================================================ /* * Copyright 2016-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.listener; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.cloud.task.repository.TaskRepository; /** * The listener interface for receiving task execution events. * * @author Glenn Renfro */ public interface TaskExecutionListener { /** * Invoked after the {@link TaskExecution} has been stored in the * {@link TaskRepository}. * @param taskExecution instance containing the information about the current task. */ default void onTaskStartup(TaskExecution taskExecution) { } /** * Invoked before the {@link TaskExecution} has been updated in the * {@link TaskRepository} upon task end. * @param taskExecution instance containing the information about the current task. */ default void onTaskEnd(TaskExecution taskExecution) { } /** * Invoked if an uncaught exception occurs during a task execution. This invocation * will occur before the {@link TaskExecution} has been updated in the * {@link TaskRepository} and before the onTaskEnd is called. * @param taskExecution instance containing the information about the current task. * @param throwable the uncaught exception that was thrown during task execution. */ default void onTaskFailed(TaskExecution taskExecution, Throwable throwable) { } } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/listener/TaskExecutionListenerSupport.java ================================================ /* * Copyright 2017-present the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.listener; /** * A no-op implementation of the {@link TaskExecutionListener} to allow for overriding * only the methods of interest. * * @author Michael Minella * @since 1.2 * * {@link TaskExecutionListener} * @deprecated since 3.0 in favor of the default implementations of */ @Deprecated public class TaskExecutionListenerSupport implements TaskExecutionListener { } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/listener/TaskExecutionObservation.java ================================================ /* * Copyright 2022-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.springframework.cloud.task.listener; import io.micrometer.common.docs.KeyName; import io.micrometer.observation.Observation; import io.micrometer.observation.ObservationConvention; import io.micrometer.observation.docs.ObservationDocumentation; /** * Enumeration for task execution observations. * * @author Glenn Renfro * @since 3.0.0 */ public enum TaskExecutionObservation implements ObservationDocumentation { /** * Metrics created around a task execution. */ TASK_ACTIVE { @Override public Class> getDefaultConvention() { return DefaultTaskExecutionObservationConvention.class; } @Override public KeyName[] getLowCardinalityKeyNames() { return TaskKeyValues.values(); } public String getPrefix() { return "spring.cloud.task"; } }; public enum TaskKeyValues implements KeyName { /** * Task name measurement. */ TASK_NAME { public String asString() { return "spring.cloud.task.name"; } }, /** * Task execution id. */ TASK_EXECUTION_ID { @Override public String asString() { return "spring.cloud.task.execution.id"; } }, /** * Task parent execution id. */ TASK_PARENT_EXECUTION_ID { @Override public String asString() { return "spring.cloud.task.parent.execution.id"; } }, /** * External execution id for task. */ TASK_EXTERNAL_EXECUTION_ID { @Override public String asString() { return "spring.cloud.task.external.execution.id"; } }, /** * Task exit code. */ TASK_EXIT_CODE { @Override public String asString() { return "spring.cloud.task.exit.code"; } }, /** * task status. Can be either success or failure. */ TASK_STATUS { @Override public String asString() { return "spring.cloud.task.status"; } }, /** * Organization Name for CF cloud. */ TASK_CF_ORG_NAME { @Override public String asString() { return "spring.cloud.task.cf.org.name"; } }, /** * Space id for CF cloud. */ TASK_CF_SPACE_ID { @Override public String asString() { return "spring.cloud.task.cf.space.id"; } }, /** * Space name for CF cloud. 
*/ TASK_CF_SPACE_NAME { @Override public String asString() { return "spring.cloud.task.cf.space.name"; } }, /** * App name for CF cloud. */ TASK_CF_APP_NAME { @Override public String asString() { return "spring.cloud.task.cf.app.name"; } }, /** * App id for CF cloud. */ TASK_CF_APP_ID { @Override public String asString() { return "spring.cloud.task.cf.app.id"; } }, /** * App version for CF cloud. */ TASK_CF_APP_VERSION { @Override public String asString() { return "spring.cloud.task.cf.app.version"; } }, /** * Instance index for CF cloud. */ TASK_CF_INSTANCE_INDEX { @Override public String asString() { return "spring.cloud.task.cf.instance.index"; } } } } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/listener/TaskExecutionObservationContext.java ================================================ /* * Copyright 2022-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.listener; import java.util.function.Supplier; import io.micrometer.observation.Observation; import io.micrometer.observation.ObservationHandler; import org.springframework.cloud.task.repository.TaskExecution; /** * A mutable holder of the {@link TaskExecution} required by a {@link ObservationHandler}. 
* * @author Glenn Renfro * @since 3.0.0 */ public class TaskExecutionObservationContext extends Observation.Context implements Supplier { private final TaskExecution taskExecution; private String exceptionMessage = "none"; private String status = "success"; public TaskExecutionObservationContext(TaskExecution taskExecution) { this.taskExecution = taskExecution; } public TaskExecution getTaskExecution() { return taskExecution; } public String getExceptionMessage() { return exceptionMessage; } public void setExceptionMessage(String exceptionMessage) { this.exceptionMessage = exceptionMessage; } public String getStatus() { return status; } public void setStatus(String status) { this.status = status; } @Override public TaskExecutionObservationContext get() { return this; } } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/listener/TaskExecutionObservationConvention.java ================================================ /* * Copyright 2022-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.listener; import io.micrometer.observation.Observation; import io.micrometer.observation.ObservationConvention; /** * {@link ObservationConvention} for {@link TaskExecutionObservationContext}. 
* * @author Glenn Renfro * @since 3.0.0 */ public interface TaskExecutionObservationConvention extends ObservationConvention { @Override default boolean supportsContext(Observation.Context context) { return context instanceof TaskExecutionObservationContext; } } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/listener/TaskLifecycleListener.java ================================================ /* * Copyright 2016-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.springframework.cloud.task.listener; import java.io.PrintWriter; import java.io.StringWriter; import java.lang.reflect.InvocationTargetException; import java.time.LocalDateTime; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.List; import io.micrometer.observation.ObservationConvention; import io.micrometer.observation.ObservationRegistry; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.springframework.beans.factory.DisposableBean; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.ApplicationArguments; import org.springframework.boot.ExitCodeEvent; import org.springframework.boot.ExitCodeGenerator; import org.springframework.boot.context.event.ApplicationFailedEvent; import org.springframework.boot.context.event.ApplicationReadyEvent; import org.springframework.cloud.task.configuration.TaskObservationCloudKeyValues; import org.springframework.cloud.task.configuration.TaskProperties; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.cloud.task.repository.TaskExplorer; import org.springframework.cloud.task.repository.TaskNameResolver; import org.springframework.cloud.task.repository.TaskRepository; import org.springframework.context.ApplicationEvent; import org.springframework.context.ApplicationListener; import org.springframework.context.ConfigurableApplicationContext; import org.springframework.context.SmartLifecycle; import org.springframework.core.Ordered; import org.springframework.util.Assert; import org.springframework.util.CollectionUtils; import org.springframework.util.StringUtils; /** * Monitors the lifecycle of a task. This listener will record both the start and end of a * task in the registered {@link TaskRepository}. * * The following events are used to identify the start and end of a task: * *
    *
  • {@link SmartLifecycle#start()} - Used to identify the start of a task. A task is * expected to contain a single application context.
  • *
  • {@link ApplicationReadyEvent} - Used to identify the successful end of a task.
  • *
  • {@link ApplicationFailedEvent} - Used to identify the failure of a task.
  • *
  • {@link SmartLifecycle#stop()} - Used to identify the end of a task, if the * {@link ApplicationReadyEvent} or {@link ApplicationFailedEvent} is not emitted. This * can occur if an error occurs while executing a BeforeTask.
  • *
* * Note: By default, the context will close at the completion of the task unless * other non-daemon threads keep it running. Programatic closing of the context can be * configured via the property spring.cloud.task.closecontext_enabled * (defaults to false). If the spring.cloud.task.closecontext_enabled is set * to true, then the context will be closed upon task completion regardless if non-daemon * threads are still running. Also if the context did not start, the FailedTask and * TaskEnd may not have all the dependencies met. * * @author Michael Minella * @author Glenn Renfro */ public class TaskLifecycleListener implements ApplicationListener, SmartLifecycle, DisposableBean, Ordered { private static final Log logger = LogFactory.getLog(TaskLifecycleListener.class); private final TaskRepository taskRepository; private final TaskExplorer taskExplorer; private final TaskListenerExecutorObjectFactory taskListenerExecutorObjectFactory; private final TaskObservations taskObservations; @Autowired private ConfigurableApplicationContext context; @Autowired(required = false) private Collection taskExecutionListenersFromContext; @Autowired(required = false) private ObservationConvention observationConvention; private List taskExecutionListeners; private TaskExecution taskExecution; private TaskProperties taskProperties; private boolean started = false; private boolean finished = false; private boolean listenerFailed = false; private Throwable listenerException; private TaskNameResolver taskNameResolver; private ApplicationArguments applicationArguments; private Throwable applicationFailedException; private ExitCodeEvent exitCodeEvent; /** * @param taskRepository {@link TaskRepository} to record executions. * @param taskNameResolver {@link TaskNameResolver} used to determine task name for * task execution. * @param applicationArguments {@link ApplicationArguments} to be used for task * execution. * @param taskExplorer {@link TaskExplorer} to be used for task execution. 
* @param taskProperties {@link TaskProperties} to be used for the task execution. * @param taskListenerExecutorObjectFactory {@link TaskListenerExecutorObjectFactory} * to initialize TaskListenerExecutor for a task */ public TaskLifecycleListener(TaskRepository taskRepository, TaskNameResolver taskNameResolver, ApplicationArguments applicationArguments, TaskExplorer taskExplorer, TaskProperties taskProperties, TaskListenerExecutorObjectFactory taskListenerExecutorObjectFactory, @Autowired(required = false) ObservationRegistry observationRegistry, TaskObservationCloudKeyValues taskObservationCloudKeyValues) { Assert.notNull(taskRepository, "A taskRepository is required"); Assert.notNull(taskNameResolver, "A taskNameResolver is required"); Assert.notNull(taskExplorer, "A taskExplorer is required"); Assert.notNull(taskProperties, "TaskProperties is required"); Assert.notNull(taskListenerExecutorObjectFactory, "A TaskListenerExecutorObjectFactory is required"); this.taskRepository = taskRepository; this.taskNameResolver = taskNameResolver; this.applicationArguments = applicationArguments; this.taskExplorer = taskExplorer; this.taskProperties = taskProperties; this.taskListenerExecutorObjectFactory = taskListenerExecutorObjectFactory; observationRegistry = observationRegistry == null ? ObservationRegistry.NOOP : observationRegistry; this.taskObservations = new TaskObservations(observationRegistry, taskObservationCloudKeyValues, observationConvention); } /** * Utilizes {@link ApplicationEvent}s to determine the end and failure of a task. * Specifically: *
    *
  • {@link ApplicationReadyEvent} - Successful end of a task
  • *
  • {@link ApplicationFailedEvent} - Failure of a task
  • *
 * @param applicationEvent The application being listened for.
 */
@Override
public void onApplicationEvent(ApplicationEvent applicationEvent) {
	// A boot failure ends the task immediately (the exception drives the exit
	// code and error message). An ExitCodeEvent is only recorded here; the
	// execution is closed out when the application is ready or failed.
	if (applicationEvent instanceof ApplicationFailedEvent) {
		this.applicationFailedException = ((ApplicationFailedEvent) applicationEvent).getException();
		doTaskEnd();
	}
	else if (applicationEvent instanceof ExitCodeEvent) {
		this.exitCodeEvent = (ExitCodeEvent) applicationEvent;
	}
	else if (applicationEvent instanceof ApplicationReadyEvent) {
		doTaskEnd();
	}
}

// Renders a throwable's full stack trace to a String for storage as the task
// execution's error message.
private String stackTraceToString(Throwable exception) {
	StringWriter writer = new StringWriter();
	PrintWriter printWriter = new PrintWriter(writer);
	exception.printStackTrace(printWriter);
	return writer.toString();
}

// Records the end of the task execution exactly once: stamps the end time,
// resolves the exit code, notifies error/end listeners, persists the result
// and (optionally) closes the application context.
private void doTaskEnd() {
	// Only complete when the task actually started (or a startup listener
	// already failed) and was not completed before; "finished" guards against
	// double completion since both the failed and ready paths land here.
	if ((this.listenerFailed || this.started) && !this.finished) {
		this.taskExecution.setEndTime(LocalDateTime.now());
		if (this.applicationFailedException != null) {
			this.taskExecution.setErrorMessage(stackTraceToString(this.applicationFailedException));
		}
		this.taskExecution.setExitCode(calcExitStatus());
		if (this.applicationFailedException != null) {
			setExitMessage(invokeOnTaskError(this.taskExecution, this.applicationFailedException));
		}
		setExitMessage(invokeOnTaskEnd(this.taskExecution));
		this.taskRepository.completeTaskExecution(this.taskExecution.getExecutionId(),
				this.taskExecution.getExitCode(), this.taskExecution.getEndTime(),
				this.taskExecution.getExitMessage(), this.taskExecution.getErrorMessage());
		this.finished = true;
		if (this.taskProperties.getClosecontextEnabled() && this.context.isActive()) {
			this.context.close();
		}
	}
	else if (!this.started) {
		logger.error("An event to end a task has been received for a task that has " + "not yet started.");
	}
}

// Copies a listener-supplied exit message onto the tracked execution; a null
// message from the listener leaves the current one untouched.
private void setExitMessage(TaskExecution taskExecutionParam) {
	if (taskExecutionParam.getExitMessage() != null) {
		this.taskExecution.setExitMessage(taskExecutionParam.getExitMessage());
	}
}

// Resolves the exit code: an explicit ExitCodeEvent wins; otherwise a failure
// maps to the exception's ExitCodeGenerator value (after unwrapping listener
// reflection wrappers) or defaults to 1. No event and no failure -> 0.
private int calcExitStatus() {
	int exitCode = 0;
	if (this.exitCodeEvent != null) {
		exitCode = this.exitCodeEvent.getExitCode();
	}
	else if (this.listenerFailed || this.applicationFailedException != null) {
		Throwable exception = this.listenerException;
		// Listener exceptions raised via reflection arrive wrapped as
		// TaskExecutionException -> InvocationTargetException; unwrap to the
		// listener's real exception before consulting ExitCodeGenerator.
		if (exception instanceof TaskExecutionException) {
			TaskExecutionException taskExecutionException = (TaskExecutionException) exception;
			if (taskExecutionException.getCause() instanceof InvocationTargetException) {
				InvocationTargetException invocationTargetException = (InvocationTargetException) taskExecutionException
					.getCause();
				if (invocationTargetException != null && invocationTargetException.getTargetException() != null) {
					exception = invocationTargetException.getTargetException();
				}
			}
		}
		if (exception instanceof ExitCodeGenerator) {
			exitCode = ((ExitCodeGenerator) exception).getExitCode();
		}
		else {
			exitCode = 1;
		}
	}
	return exitCode;
}

// Creates (or resumes) the TaskExecution record and fires the startup
// listeners. Any failure is recorded against the execution, the execution is
// closed out, and the throwable is rethrown so the application fails to start.
private void doTaskStart() {
	try {
		if (!this.started) {
			this.taskExecutionListeners = new ArrayList<>();
			// NOTE(review): the result of this first getObject() call is
			// discarded; a second call below supplies the listener actually
			// registered - confirm the extra call is intentional.
			this.taskListenerExecutorObjectFactory.getObject();
			if (!CollectionUtils.isEmpty(this.taskExecutionListenersFromContext)) {
				this.taskExecutionListeners.addAll(this.taskExecutionListenersFromContext);
			}
			this.taskExecutionListeners.add(this.taskListenerExecutorObjectFactory.getObject());
			List args = new ArrayList<>(0);
			if (this.applicationArguments != null) {
				args = Arrays.asList(this.applicationArguments.getSourceArgs());
			}
			if (this.taskProperties.getExecutionid() != null) {
				// An externally pre-created execution id was supplied: validate
				// that it exists and is still open, then mark it started.
				TaskExecution taskExecution = this.taskExplorer
					.getTaskExecution(this.taskProperties.getExecutionid());
				Assert.notNull(taskExecution,
						String.format("Invalid TaskExecution, ID %s not found", this.taskProperties.getExecutionid()));
				Assert.isNull(taskExecution.getEndTime(),
						String.format("Invalid TaskExecution, ID %s task is already complete",
								this.taskProperties.getExecutionid()));
				LocalDateTime startDate = (taskExecution.getStartTime() == null) ? LocalDateTime.now()
						: taskExecution.getStartTime();
				this.taskExecution = this.taskRepository.startTaskExecution(this.taskProperties.getExecutionid(),
						this.taskNameResolver.getTaskName(), startDate, args,
						this.taskProperties.getExternalExecutionId(), this.taskProperties.getParentExecutionId());
			}
			else {
				// No pre-created id: record a brand new task execution.
				TaskExecution taskExecution = new TaskExecution();
				taskExecution.setTaskName(this.taskNameResolver.getTaskName());
				taskExecution.setStartTime(LocalDateTime.now());
				taskExecution.setArguments(args);
				taskExecution.setExternalExecutionId(this.taskProperties.getExternalExecutionId());
				taskExecution.setParentExecutionId(this.taskProperties.getParentExecutionId());
				this.taskExecution = this.taskRepository.createTaskExecution(taskExecution);
			}
		}
		else {
			logger.error("Multiple start events have been received. The first one was " + "recorded.");
		}
		setExitMessage(invokeOnTaskStartup(this.taskExecution));
	}
	catch (Throwable t) {
		// Startup failed before the context was fully up: record the failure,
		// close out the task execution and rethrow.
		this.applicationFailedException = t;
		this.doTaskEnd();
		throw t;
	}
}

// Fires onTaskStartup on all listeners (in reverse registration order) against
// a defensive copy of the execution; a listener failure is recorded and
// rethrown to abort startup.
private TaskExecution invokeOnTaskStartup(TaskExecution taskExecution) {
	this.taskObservations.onTaskStartup(taskExecution);
	TaskExecution listenerTaskExecution = getTaskExecutionCopy(taskExecution);
	List startupListenerList = new ArrayList<>(this.taskExecutionListeners);
	if (!CollectionUtils.isEmpty(startupListenerList)) {
		try {
			Collections.reverse(startupListenerList);
			for (TaskExecutionListener taskExecutionListener : startupListenerList) {
				taskExecutionListener.onTaskStartup(listenerTaskExecution);
			}
		}
		catch (Throwable currentListenerException) {
			logger.error(currentListenerException);
			this.listenerFailed = true;
			this.taskExecution.setErrorMessage(currentListenerException.getMessage());
			this.listenerException = currentListenerException;
			throw currentListenerException;
		}
	}
	return listenerTaskExecution;
}

// Fires onTaskEnd on every listener; a listener failure is folded into the
// execution's error message rather than rethrown.
private TaskExecution invokeOnTaskEnd(TaskExecution taskExecution) {
	if (this.taskObservations != null) {
		this.taskObservations.onTaskEnd(taskExecution);
	}
	TaskExecution listenerTaskExecution = getTaskExecutionCopy(taskExecution);
	if (this.taskExecutionListeners != null) {
		try {
			for (TaskExecutionListener taskExecutionListener : this.taskExecutionListeners) {
				taskExecutionListener.onTaskEnd(listenerTaskExecution);
			}
		}
		catch (Throwable listenerException) {
			String errorMessage = stackTraceToString(listenerException);
			if (StringUtils.hasText(listenerTaskExecution.getErrorMessage())) {
				errorMessage = String.format("%s :Task also threw this Exception: %s", errorMessage,
						listenerTaskExecution.getErrorMessage());
			}
			logger.error(errorMessage);
			listenerTaskExecution.setErrorMessage(errorMessage);
			this.listenerFailed = true;
		}
	}
	return listenerTaskExecution;
}

// Fires onTaskFailed on every listener with the causing throwable; a listener
// failure here forces exit code 1 on the copy handed back to doTaskEnd().
private TaskExecution invokeOnTaskError(TaskExecution taskExecution, Throwable throwable) {
	if (this.taskObservations != null) {
		this.taskObservations.onTaskFailed(throwable);
	}
	TaskExecution listenerTaskExecution = getTaskExecutionCopy(taskExecution);
	if (this.taskExecutionListeners != null) {
		try {
			for (TaskExecutionListener taskExecutionListener : this.taskExecutionListeners) {
				taskExecutionListener.onTaskFailed(listenerTaskExecution, throwable);
			}
		}
		catch (Throwable listenerException) {
			this.listenerFailed = true;
			String errorMessage;
			if (StringUtils.hasText(listenerTaskExecution.getErrorMessage())) {
				errorMessage = String.format("%s :While handling " + "this error: %s",
						listenerException.getMessage(), listenerTaskExecution.getErrorMessage());
			}
			else {
				// NOTE(review): this branch re-assigns the execution's existing
				// (blank) error message and drops listenerException's own
				// message - looks unintended; confirm.
				errorMessage = listenerTaskExecution.getErrorMessage();
			}
			logger.error(errorMessage);
			listenerTaskExecution.setErrorMessage(errorMessage);
			listenerTaskExecution.setExitCode(1);
		}
	}
	return listenerTaskExecution;
}

// Returns a defensive copy of the execution so listeners cannot mutate the
// canonical instance directly; arguments are exposed as an unmodifiable list.
private TaskExecution getTaskExecutionCopy(TaskExecution taskExecution) {
	LocalDateTime startTime = taskExecution.getStartTime();
	LocalDateTime endTime = taskExecution.getEndTime();
	return new TaskExecution(taskExecution.getExecutionId(), taskExecution.getExitCode(),
			taskExecution.getTaskName(), startTime, endTime, taskExecution.getExitMessage(),
			Collections.unmodifiableList(taskExecution.getArguments()), taskExecution.getErrorMessage(),
			taskExecution.getExternalExecutionId(), taskExecution.getParentExecutionId());
}

@Override
public boolean isAutoStartup() {
	return true;
}

@Override
public void stop(Runnable callback) {
	Assert.notNull(callback, "A callback is required");
	stop();
	callback.run();
}

@Override
public void start() {
	doTaskStart();
	this.started = true;
}

@Override
public void stop() {
	this.doTaskEnd();
}

@Override
public boolean isRunning() {
	return this.started;
}

@Override
public int getPhase() {
	return 0;
}

@Override
public void destroy() {
}

@Override
public int getOrder() {
	return HIGHEST_PRECEDENCE;
}

}

================================================
FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/listener/TaskListenerExecutorObjectFactory.java
================================================
/*
 * Copyright 2018-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*/ package org.springframework.cloud.task.listener; import java.lang.annotation.Annotation; import java.lang.reflect.Method; import java.util.Collections; import java.util.HashMap; import java.util.LinkedHashSet; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.springframework.aop.framework.autoproxy.AutoProxyUtils; import org.springframework.aop.scope.ScopedObject; import org.springframework.aop.scope.ScopedProxyUtils; import org.springframework.beans.factory.BeanInitializationException; import org.springframework.beans.factory.ObjectFactory; import org.springframework.beans.factory.config.ConfigurableListableBeanFactory; import org.springframework.cloud.task.listener.annotation.AfterTask; import org.springframework.cloud.task.listener.annotation.BeforeTask; import org.springframework.cloud.task.listener.annotation.FailedTask; import org.springframework.cloud.task.listener.annotation.TaskListenerExecutor; import org.springframework.context.ConfigurableApplicationContext; import org.springframework.core.MethodIntrospector; import org.springframework.core.annotation.AnnotationUtils; /** * Initializes TaskListenerExecutor for a task. 
* * @author Glenn Renfro * @author Isik Erhan * @since 2.1.0 */ public class TaskListenerExecutorObjectFactory implements ObjectFactory { private static final Log logger = LogFactory.getLog(TaskListenerExecutor.class); private final Set> nonAnnotatedClasses = Collections.newSetFromMap(new ConcurrentHashMap<>()); private ConfigurableApplicationContext context; private Map> beforeTaskInstances; private Map> afterTaskInstances; private Map> failedTaskInstances; public TaskListenerExecutorObjectFactory(ConfigurableApplicationContext context) { this.context = context; } @Override public TaskListenerExecutor getObject() { this.beforeTaskInstances = new HashMap<>(); this.afterTaskInstances = new HashMap<>(); this.failedTaskInstances = new HashMap<>(); initializeExecutor(); return new TaskListenerExecutor(this.beforeTaskInstances, this.afterTaskInstances, this.failedTaskInstances); } private void initializeExecutor() { ConfigurableListableBeanFactory factory = this.context.getBeanFactory(); for (String beanName : this.context.getBeanDefinitionNames()) { if (!ScopedProxyUtils.isScopedTarget(beanName)) { Class type = null; try { type = AutoProxyUtils.determineTargetClass(factory, beanName); } catch (RuntimeException ex) { // An unresolvable bean type, probably from a lazy bean - let's ignore // it. if (logger.isDebugEnabled()) { logger.debug("Could not resolve target class for bean with name '" + beanName + "'", ex); } } if (type != null) { if (ScopedObject.class.isAssignableFrom(type)) { try { type = AutoProxyUtils.determineTargetClass(factory, ScopedProxyUtils.getTargetBeanName(beanName)); } catch (RuntimeException ex) { // An invalid scoped proxy arrangement - let's ignore it. 
if (logger.isDebugEnabled()) { logger.debug("Could not resolve target bean for scoped proxy '" + beanName + "'", ex); } } } try { processBean(beanName, type); } catch (RuntimeException ex) { throw new BeanInitializationException( "Failed to process @BeforeTask " + "annotation on bean with name '" + beanName + "'", ex); } } } } } private void processBean(String beanName, final Class type) { if (!this.nonAnnotatedClasses.contains(type)) { Map beforeTaskMethods = (new MethodGetter()).getMethods(type, BeforeTask.class); Map afterTaskMethods = (new MethodGetter()).getMethods(type, AfterTask.class); Map failedTaskMethods = (new MethodGetter()).getMethods(type, FailedTask.class); if (beforeTaskMethods.isEmpty() && afterTaskMethods.isEmpty()) { this.nonAnnotatedClasses.add(type); return; } if (!beforeTaskMethods.isEmpty()) { for (Method beforeTaskMethod : beforeTaskMethods.keySet()) { this.beforeTaskInstances.computeIfAbsent(beforeTaskMethod, k -> new LinkedHashSet<>()) .add(this.context.getBean(beanName)); } } if (!afterTaskMethods.isEmpty()) { for (Method afterTaskMethod : afterTaskMethods.keySet()) { this.afterTaskInstances.computeIfAbsent(afterTaskMethod, k -> new LinkedHashSet<>()) .add(this.context.getBean(beanName)); } } if (!failedTaskMethods.isEmpty()) { for (Method failedTaskMethod : failedTaskMethods.keySet()) { this.failedTaskInstances.computeIfAbsent(failedTaskMethod, k -> new LinkedHashSet<>()) .add(this.context.getBean(beanName)); } } } } private static final class MethodGetter { public Map getMethods(final Class type, final Class annotationClass) { return MethodIntrospector.selectMethods(type, (MethodIntrospector.MetadataLookup) method -> AnnotationUtils.findAnnotation(method, annotationClass)); } } } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/listener/TaskObservations.java ================================================ /* * Copyright 2019-present the original author or 
authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.cloud.task.listener;

import io.micrometer.observation.Observation;
import io.micrometer.observation.ObservationConvention;
import io.micrometer.observation.ObservationRegistry;

import org.springframework.cloud.task.configuration.TaskObservationCloudKeyValues;
import org.springframework.cloud.task.repository.TaskExecution;

/**
 * Utility class for publishing Spring Cloud Task specific Observations via Micrometer.
 * Intended for internal use only.
 *
 * @author Christian Tzolov
 * @author Glenn Renfro
 * @since 2.2
 */
public class TaskObservations {

	/**
	 * Successful task execution status indicator.
	 */
	public static final String STATUS_SUCCESS = "success";

	/**
	 * Failing task execution status indicator.
	 */
	public static final String STATUS_FAILURE = "failure";

	/**
	 * Default for when value is not present.
	 */
	public static final String UNKNOWN = "unknown";

	// Registry the task observation is published to.
	private ObservationRegistry observationRegistry;

	// Optional user-supplied convention; passed as the preferred convention
	// when starting the observation.
	private ObservationConvention customObservationConvention;

	public TaskObservations(ObservationRegistry observationRegistry,
			TaskObservationCloudKeyValues taskObservationCloudKeyValues,
			ObservationConvention customObservationConvention) {
		this.observationRegistry = observationRegistry;
		this.taskObservationCloudKeyValues = taskObservationCloudKeyValues;
		this.customObservationConvention = customObservationConvention;
	}

	// Scope opened by onTaskStartup and closed by onTaskEnd; null until startup.
	private Observation.Scope scope;

	private TaskExecutionObservationConvention observationsProvider = new DefaultTaskExecutionObservationConvention();

	private TaskExecutionObservationContext taskObservationContext;

	// Cloud Foundry key values; contributed only when non-null. Note: field is
	// declared after the constructor that assigns it - legal Java, just
	// unconventional ordering.
	TaskObservationCloudKeyValues taskObservationCloudKeyValues;

	/**
	 * Starts the observation for a task execution and opens its scope. Must be
	 * called before {@link #onTaskFailed(Throwable)} or
	 * {@link #onTaskEnd(TaskExecution)}.
	 * @param taskExecution execution whose identifiers become key values
	 */
	public void onTaskStartup(TaskExecution taskExecution) {
		this.taskObservationContext = new TaskExecutionObservationContext(taskExecution);
		Observation observation = TaskExecutionObservation.TASK_ACTIVE
			.observation(this.customObservationConvention, new DefaultTaskExecutionObservationConvention(),
					this.taskObservationContext, this.observationRegistry)
			.contextualName(String.valueOf(taskExecution.getExecutionId()))
			.observationConvention(this.observationsProvider)
			.lowCardinalityKeyValue(TaskExecutionObservation.TaskKeyValues.TASK_NAME.asString(),
					getValueOrDefault(taskExecution.getTaskName()))
			.lowCardinalityKeyValue(TaskExecutionObservation.TaskKeyValues.TASK_EXECUTION_ID.asString(),
					"" + taskExecution.getExecutionId())
			.lowCardinalityKeyValue(TaskExecutionObservation.TaskKeyValues.TASK_PARENT_EXECUTION_ID.asString(),
					(getValueOrDefault(taskExecution.getParentExecutionId())))
			.lowCardinalityKeyValue(TaskExecutionObservation.TaskKeyValues.TASK_EXTERNAL_EXECUTION_ID.asString(),
					(getValueOrDefault(taskExecution.getExternalExecutionId())));
		// Cloud Foundry metadata is appended only when running on CF.
		if (taskObservationCloudKeyValues != null) {
			observation.lowCardinalityKeyValue(TaskExecutionObservation.TaskKeyValues.TASK_CF_ORG_NAME.asString(),
					this.taskObservationCloudKeyValues.getOrganizationName());
			observation.lowCardinalityKeyValue(TaskExecutionObservation.TaskKeyValues.TASK_CF_SPACE_ID.asString(),
					this.taskObservationCloudKeyValues.getSpaceId());
			observation.lowCardinalityKeyValue(TaskExecutionObservation.TaskKeyValues.TASK_CF_SPACE_NAME.asString(),
					this.taskObservationCloudKeyValues.getSpaceName());
			observation.lowCardinalityKeyValue(TaskExecutionObservation.TaskKeyValues.TASK_CF_APP_ID.asString(),
					this.taskObservationCloudKeyValues.getApplicationId());
			observation.lowCardinalityKeyValue(TaskExecutionObservation.TaskKeyValues.TASK_CF_APP_NAME.asString(),
					this.taskObservationCloudKeyValues.getApplicationName());
			observation.lowCardinalityKeyValue(TaskExecutionObservation.TaskKeyValues.TASK_CF_APP_VERSION.asString(),
					this.taskObservationCloudKeyValues.getApplicationVersion());
			observation.lowCardinalityKeyValue(TaskExecutionObservation.TaskKeyValues.TASK_CF_INSTANCE_INDEX.asString(),
					this.taskObservationCloudKeyValues.getInstanceIndex());
		}
		observation.start();
		this.scope = observation.openScope();
	}

	// Null-safe stringification used for optional key values.
	private String getValueOrDefault(Object value) {
		return (value != null) ? value.toString() : UNKNOWN;
	}

	/**
	 * Marks the current observation as failed with the given throwable.
	 * @param throwable cause of the task failure
	 */
	public void onTaskFailed(Throwable throwable) {
		// Assumes onTaskStartup ran first; this.scope and taskObservationContext
		// are null otherwise. NOTE(review): confirm callers guarantee this.
		this.taskObservationContext.setStatus(STATUS_FAILURE);
		this.scope.getCurrentObservation().error(throwable);
	}

	/**
	 * Closes the observation scope and stops the observation, propagating the
	 * final exit code into the observation context. No-op when startup never ran.
	 * @param taskExecution execution carrying the final exit code
	 */
	public void onTaskEnd(TaskExecution taskExecution) {
		if (this.scope != null) {
			this.taskObservationContext.getTaskExecution().setExitCode(taskExecution.getExitCode());
			this.scope.close();
			// NOTE(review): the scope is closed before stop() is invoked on its
			// observation - confirm getCurrentObservation() is still the intended
			// observation at this point.
			this.scope.getCurrentObservation().stop();
		}
	}

}

================================================
FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/listener/annotation/AfterTask.java
================================================
/*
 * Copyright 2016-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.listener.annotation; import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import org.springframework.aot.hint.annotation.Reflective; import org.springframework.cloud.task.listener.TaskExecutionListener; import org.springframework.cloud.task.repository.TaskExecution; /** *

* {@link TaskExecutionListener#onTaskEnd(TaskExecution)}. *

* *
 * public class MyListener {
 * 	@AfterTask
 * 	public void  doSomething(TaskExecution taskExecution) {
 *    }
 * }
 * 
* * @author Glenn Renfro */ @Target({ ElementType.METHOD, ElementType.ANNOTATION_TYPE }) @Retention(RetentionPolicy.RUNTIME) @Documented @Reflective public @interface AfterTask { } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/listener/annotation/BeforeTask.java ================================================ /* * Copyright 2016-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.listener.annotation; import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import org.springframework.aot.hint.annotation.Reflective; import org.springframework.cloud.task.listener.TaskExecutionListener; import org.springframework.cloud.task.repository.TaskExecution; /** *

* {@link TaskExecutionListener#onTaskStartup(TaskExecution)}. *

* *
 * public class MyListener {
 * 	@BeforeTask
 * 	public void  doSomething(TaskExecution taskExecution) {
 *    }
 * }
 * 
* * @author Glenn Renfro */ @Target({ ElementType.METHOD, ElementType.ANNOTATION_TYPE }) @Retention(RetentionPolicy.RUNTIME) @Documented @Reflective public @interface BeforeTask { } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/listener/annotation/FailedTask.java ================================================ /* * Copyright 2016-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.listener.annotation; import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import org.springframework.aot.hint.annotation.Reflective; import org.springframework.cloud.task.listener.TaskExecutionListener; import org.springframework.cloud.task.repository.TaskExecution; /** *

* {@link TaskExecutionListener#onTaskFailed(TaskExecution, Throwable)}. *

* *
 * public class MyListener {
 * 	@FailedTask
 * 	public void  doSomething(TaskExecution taskExecution, Throwable throwable) {
 *    }
 * }
 * 
* * @author Glenn Renfro */ @Target({ ElementType.METHOD, ElementType.ANNOTATION_TYPE }) @Retention(RetentionPolicy.RUNTIME) @Documented @Reflective public @interface FailedTask { } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/listener/annotation/TaskListenerExecutor.java ================================================ /* * Copyright 2016-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.listener.annotation; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.Map; import java.util.Set; import org.springframework.cloud.task.listener.TaskExecutionException; import org.springframework.cloud.task.listener.TaskExecutionListener; import org.springframework.cloud.task.repository.TaskExecution; /** * Identifies all beans that contain a TaskExecutionListener annotation and stores the * associated method so that it can be called by the {@link TaskExecutionListener} at the * appropriate time. 
* * @author Glenn Renfro * @author Isik Erhan */ public class TaskListenerExecutor implements TaskExecutionListener { private Map> beforeTaskInstances; private Map> afterTaskInstances; private Map> failedTaskInstances; public TaskListenerExecutor(Map> beforeTaskInstances, Map> afterTaskInstances, Map> failedTaskInstances) { this.beforeTaskInstances = beforeTaskInstances; this.afterTaskInstances = afterTaskInstances; this.failedTaskInstances = failedTaskInstances; } /** * Executes all the methods that have been annotated with @BeforeTask. * @param taskExecution associated with the event. */ @Override public void onTaskStartup(TaskExecution taskExecution) { executeTaskListener(taskExecution, this.beforeTaskInstances.keySet(), this.beforeTaskInstances); } /** * Executes all the methods that have been annotated with @AfterTask. * @param taskExecution associated with the event. */ @Override public void onTaskEnd(TaskExecution taskExecution) { executeTaskListener(taskExecution, this.afterTaskInstances.keySet(), this.afterTaskInstances); } /** * Executes all the methods that have been annotated with @FailedTask. * @param throwable that was not caught for the task execution. * @param taskExecution associated with the event. 
*/ @Override public void onTaskFailed(TaskExecution taskExecution, Throwable throwable) { executeTaskListenerWithThrowable(taskExecution, throwable, this.failedTaskInstances.keySet(), this.failedTaskInstances); } private void executeTaskListener(TaskExecution taskExecution, Set methods, Map> instances) { for (Method method : methods) { for (Object instance : instances.get(method)) { try { method.invoke(instance, taskExecution); } catch (IllegalAccessException e) { throw new TaskExecutionException("@BeforeTask and @AfterTask annotated methods must be public.", e); } catch (InvocationTargetException e) { throw new TaskExecutionException( String.format("Failed to process @BeforeTask or @AfterTask" + " annotation because: %s", e.getTargetException().getMessage()), e); } catch (IllegalArgumentException e) { throw new TaskExecutionException( "taskExecution parameter " + "is required for @BeforeTask and @AfterTask annotated methods", e); } } } } private void executeTaskListenerWithThrowable(TaskExecution taskExecution, Throwable throwable, Set methods, Map> instances) { for (Method method : methods) { for (Object instance : instances.get(method)) { try { method.invoke(instance, taskExecution, throwable); } catch (IllegalAccessException e) { throw new TaskExecutionException("@FailedTask annotated methods must be public.", e); } catch (InvocationTargetException e) { throw new TaskExecutionException( String.format("Failed to process @FailedTask " + "annotation because: %s", e.getTargetException().getMessage()), e); } catch (IllegalArgumentException e) { throw new TaskExecutionException("taskExecution and throwable parameters " + "are required for @FailedTask annotated methods", e); } } } } } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/listener/annotation/package-info.java ================================================ /* * Copyright 2015-present the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Annotation-based listener support for Spring Cloud Task. */ package org.springframework.cloud.task.listener.annotation; ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/listener/package-info.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Task lifecycle listener support for Spring Cloud Task. */ package org.springframework.cloud.task.listener; ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/package-info.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Base package for spring cloud task. */ package org.springframework.cloud.task; ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/repository/TaskExecution.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.repository; import java.time.LocalDateTime; import java.util.ArrayList; import java.util.List; import org.springframework.util.Assert; /** * Represents the state of the Task for each execution. * * @author Glenn Renfro * @author Michael Minella * @author Ilayaperumal Gopinathan */ public class TaskExecution { /** * The unique id associated with the task execution. */ private long executionId; /** * The parent task execution id. */ private Long parentExecutionId; /** * The recorded exit code for the task. */ private Integer exitCode; /** * User defined name for the task. */ private String taskName; /** * Time of when the task was started. 
*/ private LocalDateTime startTime; /** * Timestamp of when the task was completed/terminated. */ private LocalDateTime endTime; /** * Message returned from the task or stacktrace. */ private String exitMessage; /** * Id assigned to the task by the platform. * * @since 1.1.0 */ private String externalExecutionId; /** * Error information available upon the failure of a task. * * @since 1.1.0 */ private String errorMessage; /** * The arguments that were used for this task execution. */ private List arguments; public TaskExecution() { this.arguments = new ArrayList<>(); } public TaskExecution(long executionId, Integer exitCode, String taskName, LocalDateTime startTime, LocalDateTime endTime, String exitMessage, List arguments, String errorMessage, String externalExecutionId, Long parentExecutionId) { Assert.notNull(arguments, "arguments must not be null"); this.executionId = executionId; this.exitCode = exitCode; this.taskName = taskName; this.exitMessage = exitMessage; this.arguments = new ArrayList<>(arguments); this.startTime = startTime; this.endTime = endTime; this.errorMessage = errorMessage; this.externalExecutionId = externalExecutionId; this.parentExecutionId = parentExecutionId; } public TaskExecution(long executionId, Integer exitCode, String taskName, LocalDateTime startTime, LocalDateTime endTime, String exitMessage, List arguments, String errorMessage, String externalExecutionId) { this(executionId, exitCode, taskName, startTime, endTime, exitMessage, arguments, errorMessage, externalExecutionId, null); } public long getExecutionId() { return this.executionId; } public Integer getExitCode() { return this.exitCode; } public void setExitCode(Integer exitCode) { this.exitCode = exitCode; } public String getTaskName() { return this.taskName; } public void setTaskName(String taskName) { this.taskName = taskName; } public LocalDateTime getStartTime() { return this.startTime; } public void setStartTime(LocalDateTime startTime) { this.startTime = startTime; } 
public LocalDateTime getEndTime() { return this.endTime; } public void setEndTime(LocalDateTime endTime) { this.endTime = endTime; } public String getExitMessage() { return this.exitMessage; } public void setExitMessage(String exitMessage) { this.exitMessage = exitMessage; } public List getArguments() { return this.arguments; } public void setArguments(List arguments) { this.arguments = new ArrayList<>(arguments); } public String getErrorMessage() { return this.errorMessage; } public void setErrorMessage(String errorMessage) { this.errorMessage = errorMessage; } public String getExternalExecutionId() { return this.externalExecutionId; } public void setExternalExecutionId(String externalExecutionId) { this.externalExecutionId = externalExecutionId; } public Long getParentExecutionId() { return this.parentExecutionId; } public void setParentExecutionId(Long parentExecutionId) { this.parentExecutionId = parentExecutionId; } @Override public String toString() { return "TaskExecution{" + "executionId=" + this.executionId + ", parentExecutionId=" + this.parentExecutionId + ", exitCode=" + this.exitCode + ", taskName='" + this.taskName + '\'' + ", startTime=" + this.startTime + ", endTime=" + this.endTime + ", exitMessage='" + this.exitMessage + '\'' + ", externalExecutionId='" + this.externalExecutionId + '\'' + ", errorMessage='" + this.errorMessage + '\'' + ", arguments=" + this.arguments + '}'; } } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/repository/TaskExplorer.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.repository; import java.util.List; import java.util.Set; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; /** * Offers methods that allow users to query the task executions that are available. * * @author Glenn Renfro * @author Michael Minella * @author Gunnar Hillert */ public interface TaskExplorer { /** * Retrieve a {@link TaskExecution} by its id. * @param executionId the task execution id * @return the {@link TaskExecution} with this id, or null if not found */ TaskExecution getTaskExecution(long executionId); /** * Retrieve a collection of taskExecutions that have the task name provided. * @param taskName the name of the task * @param pageable the constraints for the search * @return the set of running executions for tasks with the specified name */ Page findRunningTaskExecutions(String taskName, Pageable pageable); /** * Retrieve a collection of taskExecutions that contain the provided external * execution id. * @param externalExecutionId the external execution id of the tasks * @param pageable the constraints for the search * @return the set of task executions for tasks with the external execution id */ Page findTaskExecutionsByExecutionId(String externalExecutionId, Pageable pageable); /** * Retrieve a list of available task names. * @return the set of task names that have been executed */ List getTaskNames(); /** * Get number of executions for a taskName. 
* @param taskName the name of the task to be searched * @return the number of running tasks that have the taskname specified */ long getTaskExecutionCountByTaskName(String taskName); /** * Retrieves current number of task executions. * @return current number of task executions. */ long getTaskExecutionCount(); /** * Retrieves current number of running task executions. * @return current number of running task executions. */ long getRunningTaskExecutionCount(); /** * Retrieves current number of task executions by external executionId. * @param externalExecutionId The externalExecutionId to be searched. * @return current number of task executions for a specific externalExecutionId. */ long getTaskExecutionCountByExternalExecutionId(String externalExecutionId); /** * Get a collection/page of executions. * @param taskName the name of the task to be searched * @param pageable the constraints for the search * @return list of task executions */ Page findTaskExecutionsByName(String taskName, Pageable pageable); /** * Retrieves all the task executions within the pageable constraints sorted by start * date descending, taskExecution id descending. * @param pageable the constraints for the search * @return page containing the results from the search */ Page findAll(Pageable pageable); /** * Returns the id of the TaskExecution that the requested Spring Batch job execution * was executed within the context of. Returns null if none were found. * @param jobExecutionId the id of the JobExecution * @return the id of the {@link TaskExecution} */ Long getTaskExecutionIdByJobExecutionId(long jobExecutionId); /** * Returns a Set of JobExecution ids for the jobs that were executed within the scope * of the requested task. * @param taskExecutionId id of the {@link TaskExecution} * @return a Set of the ids of the job executions executed within the * task. 
*/ Set getJobExecutionIdsByTaskExecutionId(long taskExecutionId); /** * Returns a {@link List} of the latest {@link TaskExecution} for 1 or more task * names. * * Latest is defined by the most recent start time. A {@link TaskExecution} does not * have to be finished (The results may including pending {@link TaskExecution}s). * * It is theoretically possible that a {@link TaskExecution} with the same name to * have more than 1 {@link TaskExecution} for the exact same start time. In that case * the {@link TaskExecution} with the highest Task Execution ID is returned. * * This method will not consider end times in its calculations. Thus, when a task * execution {@code A} starts after task execution {@code B} but finishes BEFORE task * execution {@code A}, then task execution {@code B} is being returned. * @param taskNames At least 1 task name must be provided * @return List of TaskExecutions. May be empty but never null. */ List getLatestTaskExecutionsByTaskNames(String... taskNames); /** * Returns the latest task execution for a given task name. Will ultimately apply the * same algorithm underneath as {@link #getLatestTaskExecutionsByTaskNames(String...)} * but will only return a single result. * @param taskName Must not be null or empty * @return The latest Task Execution or null * @see #getLatestTaskExecutionsByTaskNames(String...) */ TaskExecution getLatestTaskExecutionForTaskName(String taskName); } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/repository/TaskNameResolver.java ================================================ /* * Copyright 2016-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.repository; /** * Strategy interface for customizing how the name of a task is determined. * * @author Michael Minella */ public interface TaskNameResolver { /** * @return the name of the task being executed within this context. */ String getTaskName(); } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/repository/TaskRepository.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.repository; import java.time.LocalDateTime; import java.util.List; import org.springframework.transaction.annotation.Transactional; /** * TaskRepository interface offers methods that create and update task execution * information. * * @author Glenn Renfro * @author Michael Minella * @author Mahmoud Ben Hassine */ public interface TaskRepository { /** * Notifies the repository that a taskExecution has completed. 
* @param executionId to the task execution to be updated. * @param exitCode to be stored for this task. * @param endTime designated when the task completed. * @param exitMessage to be stored for the task. * @return the updated {@link TaskExecution} */ @Transactional("${spring.cloud.task.transaction-manager:springCloudTaskTransactionManager}") TaskExecution completeTaskExecution(long executionId, Integer exitCode, LocalDateTime endTime, String exitMessage); /** * Notifies the repository that a taskExecution has completed. * @param executionId to the task execution to be updated. * @param exitCode to be stored for this task execution. * @param endTime designated when the task completed. * @param exitMessage to be stored for the task execution. * @param errorMessage to be stored for the task execution. * @return the updated {@link TaskExecution} * @since 1.1.0 */ @Transactional("${spring.cloud.task.transaction-manager:springCloudTaskTransactionManager}") TaskExecution completeTaskExecution(long executionId, Integer exitCode, LocalDateTime endTime, String exitMessage, String errorMessage); /** * Notifies the repository that a taskExecution needs to be created. * @param taskExecution a TaskExecution instance containing the startTime, arguments * and externalExecutionId that will be stored in the repository. Only the values * enumerated above will be stored for this TaskExecution. * @return the {@link TaskExecution} that was stored in the repository. The * TaskExecution's taskExecutionId will also contain the id that was used to store the * TaskExecution. */ @Transactional("${spring.cloud.task.transaction-manager:springCloudTaskTransactionManager}") TaskExecution createTaskExecution(TaskExecution taskExecution); /** * Creates an empty TaskExecution with just an id and name provided. 
This is intended * to be utilized in systems where the request of launching a task is separate from * the actual start of a task (the underlying system may need to deploy the task prior * to launching, etc). * @param name task name to be associated with the task execution. * @return the initial {@link TaskExecution} */ @Transactional("${spring.cloud.task.transaction-manager:springCloudTaskTransactionManager}") TaskExecution createTaskExecution(String name); /** * Creates an empty TaskExecution with just an id provided. This is intended to be * utilized in systems where the request of launching a task is separate from the * actual start of a task (the underlying system may need to deploy the task prior to * launching, etc). * @return the initial {@link TaskExecution} */ @Transactional("${spring.cloud.task.transaction-manager:springCloudTaskTransactionManager}") TaskExecution createTaskExecution(); /** * Notifies the repository that a taskExecution has has started. * @param executionid to the task execution to be updated. * @param taskName the name that associated with the task execution. * @param startTime the time task began. * @param arguments list of key/value pairs that configure the task. * @param externalExecutionId id assigned to the task by the platform. * @return TaskExecution created based on the parameters. */ @Transactional("${spring.cloud.task.transaction-manager:springCloudTaskTransactionManager}") TaskExecution startTaskExecution(long executionid, String taskName, LocalDateTime startTime, List arguments, String externalExecutionId); /** * Notifies the repository to update the taskExecution's externalExecutionId. * @param executionid to the task execution to be updated. * @param externalExecutionId id assigned to the task by the platform. 
*/ @Transactional("${spring.cloud.task.transaction-manager:springCloudTaskTransactionManager}") void updateExternalExecutionId(long executionid, String externalExecutionId); /** * Notifies the repository that a taskExecution has has started. * @param executionid to the task execution to be updated. * @param taskName the name that associated with the task execution. * @param startTime the time task began. * @param arguments list of key/value pairs that configure the task. * @param externalExecutionId id assigned to the task by the platform. * @param parentExecutionId the parent task execution id. * @return A TaskExecution that contains the information available at the beginning of * a TaskExecution. */ @Transactional("${spring.cloud.task.transaction-manager:springCloudTaskTransactionManager}") TaskExecution startTaskExecution(long executionid, String taskName, LocalDateTime startTime, List arguments, String externalExecutionId, Long parentExecutionId); } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/repository/dao/JdbcTaskExecutionDao.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.springframework.cloud.task.repository.dao; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Timestamp; import java.sql.Types; import java.time.LocalDateTime; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.TreeSet; import javax.sql.DataSource; import org.springframework.batch.infrastructure.item.database.Order; import org.springframework.cloud.task.configuration.TaskProperties; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.cloud.task.repository.database.PagingQueryProvider; import org.springframework.cloud.task.repository.database.support.SqlPagingQueryProviderFactoryBean; import org.springframework.dao.DataAccessException; import org.springframework.dao.EmptyResultDataAccessException; import org.springframework.data.domain.Page; import org.springframework.data.domain.PageImpl; import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Sort; import org.springframework.jdbc.core.ResultSetExtractor; import org.springframework.jdbc.core.RowCallbackHandler; import org.springframework.jdbc.core.RowMapper; import org.springframework.jdbc.core.namedparam.MapSqlParameterSource; import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; import org.springframework.jdbc.support.incrementer.DataFieldMaxValueIncrementer; import org.springframework.util.Assert; import org.springframework.util.CollectionUtils; import org.springframework.util.StringUtils; /** * Stores Task Execution Information to a JDBC DataSource. * * @author Glenn Renfro * @author Gunnar Hillert * @author David Turanski * @author Ilayaperumal Gopinathan * @author Michael Minella */ public class JdbcTaskExecutionDao implements TaskExecutionDao { /** * SELECT clause for task execution. 
*/ public static final String SELECT_CLAUSE = "TASK_EXECUTION_ID, " + "START_TIME, END_TIME, TASK_NAME, EXIT_CODE, " + "EXIT_MESSAGE, ERROR_MESSAGE, LAST_UPDATED, " + "EXTERNAL_EXECUTION_ID, PARENT_EXECUTION_ID "; /** * FROM clause for task execution. */ public static final String FROM_CLAUSE = "%PREFIX%EXECUTION"; /** * WHERE clause for running task. */ public static final String RUNNING_TASK_WHERE_CLAUSE = "where TASK_NAME = :taskName AND END_TIME IS NULL "; /** * WHERE clause for task name. */ public static final String TASK_NAME_WHERE_CLAUSE = "where TASK_NAME = :taskName "; /** * WHERE clause for external execution id. */ public static final String EXTERNAL_EXECUTION_ID_WHERE_CLAUSE = "where EXTERNAL_EXECUTION_ID = :externalExecutionId "; private static final String SAVE_TASK_EXECUTION = "INSERT into %PREFIX%EXECUTION" + "(TASK_EXECUTION_ID, EXIT_CODE, START_TIME, TASK_NAME, LAST_UPDATED, EXTERNAL_EXECUTION_ID, PARENT_EXECUTION_ID)" + "values (:taskExecutionId, :exitCode, :startTime, " + ":taskName, :lastUpdated, :externalExecutionId, :parentExecutionId)"; private static final String CREATE_TASK_ARGUMENT = "INSERT into " + "%PREFIX%EXECUTION_PARAMS(TASK_EXECUTION_ID, TASK_PARAM ) values (:taskExecutionId, :taskParam)"; private static final String START_TASK_EXECUTION_PREFIX = "UPDATE %PREFIX%EXECUTION set " + "START_TIME = :startTime, TASK_NAME = :taskName, LAST_UPDATED = :lastUpdated"; private static final String START_TASK_EXECUTION_EXTERNAL_ID_SUFFIX = ", " + "EXTERNAL_EXECUTION_ID = :externalExecutionId, PARENT_EXECUTION_ID = :parentExecutionId " + "where TASK_EXECUTION_ID = :taskExecutionId"; private static final String START_TASK_EXECUTION_SUFFIX = ", PARENT_EXECUTION_ID = :parentExecutionId " + "where TASK_EXECUTION_ID = :taskExecutionId"; private static final String CHECK_TASK_EXECUTION_EXISTS = "SELECT COUNT(*) FROM " + "%PREFIX%EXECUTION WHERE TASK_EXECUTION_ID = :taskExecutionId"; private static final String UPDATE_TASK_EXECUTION = "UPDATE 
%PREFIX%EXECUTION set " + "END_TIME = :endTime, EXIT_CODE = :exitCode, EXIT_MESSAGE = :exitMessage, ERROR_MESSAGE = :errorMessage, " + "LAST_UPDATED = :lastUpdated where TASK_EXECUTION_ID = :taskExecutionId"; private static final String UPDATE_TASK_EXECUTION_EXTERNAL_EXECUTION_ID = "UPDATE %PREFIX%EXECUTION set " + "EXTERNAL_EXECUTION_ID = :externalExecutionId where TASK_EXECUTION_ID = :taskExecutionId"; private static final String GET_EXECUTION_BY_ID = "SELECT TASK_EXECUTION_ID, " + "START_TIME, END_TIME, TASK_NAME, EXIT_CODE, " + "EXIT_MESSAGE, ERROR_MESSAGE, LAST_UPDATED, EXTERNAL_EXECUTION_ID, " + "PARENT_EXECUTION_ID " + "from %PREFIX%EXECUTION where TASK_EXECUTION_ID = :taskExecutionId"; private static final String FIND_ARGUMENT_FROM_ID = "SELECT TASK_EXECUTION_ID, " + "TASK_PARAM from %PREFIX%EXECUTION_PARAMS where TASK_EXECUTION_ID = :taskExecutionId"; private static final String TASK_EXECUTION_COUNT = "SELECT COUNT(*) FROM " + "%PREFIX%EXECUTION "; private static final String TASK_EXECUTION_COUNT_BY_NAME = "SELECT COUNT(*) FROM " + "%PREFIX%EXECUTION where TASK_NAME = :taskName"; private static final String TASK_EXECUTION_COUNT_BY_EXTERNAL_EXECUTION_ID = "SELECT COUNT(*) FROM " + "%PREFIX%EXECUTION where EXTERNAL_EXECUTION_ID = :externalExecutionId"; private static final String RUNNING_TASK_EXECUTION_COUNT_BY_NAME = "SELECT COUNT(*) FROM " + "%PREFIX%EXECUTION where TASK_NAME = :taskName AND END_TIME IS NULL "; private static final String RUNNING_TASK_EXECUTION_COUNT = "SELECT COUNT(*) FROM " + "%PREFIX%EXECUTION where END_TIME IS NULL "; private static final String LAST_TASK_EXECUTIONS_BY_TASK_NAMES = "select TE2.* from (" + "select MAX(TE.TASK_EXECUTION_ID) as TASK_EXECUTION_ID, TE.TASK_NAME, TE.START_TIME from (" + "select TASK_NAME, MAX(START_TIME) as START_TIME" + " FROM %PREFIX%EXECUTION where TASK_NAME in (:taskNames)" + " GROUP BY TASK_NAME" + ") TE_MAX " + "inner join %PREFIX%EXECUTION TE ON TE.TASK_NAME = TE_MAX.TASK_NAME AND TE.START_TIME = 
TE_MAX.START_TIME " + "group by TE.TASK_NAME, TE.START_TIME" + ") TE1 " + "inner join %PREFIX%EXECUTION TE2 ON TE1.TASK_EXECUTION_ID = TE2.TASK_EXECUTION_ID " + "order by TE2.START_TIME DESC, TE2.TASK_EXECUTION_ID DESC"; private static final String FIND_TASK_NAMES = "SELECT distinct TASK_NAME from %PREFIX%EXECUTION order by TASK_NAME"; private static final String FIND_TASK_EXECUTION_BY_JOB_EXECUTION_ID = "SELECT TASK_EXECUTION_ID FROM " + "%PREFIX%TASK_BATCH WHERE JOB_EXECUTION_ID = :jobExecutionId"; private static final String FIND_JOB_EXECUTION_BY_TASK_EXECUTION_ID = "SELECT JOB_EXECUTION_ID " + "FROM %PREFIX%TASK_BATCH WHERE TASK_EXECUTION_ID = :taskExecutionId"; private static final Set validSortColumns = new HashSet<>(10); static { validSortColumns.add("TASK_EXECUTION_ID"); validSortColumns.add("START_TIME"); validSortColumns.add("END_TIME"); validSortColumns.add("TASK_NAME"); validSortColumns.add("EXIT_CODE"); validSortColumns.add("EXIT_MESSAGE"); validSortColumns.add("ERROR_MESSAGE"); validSortColumns.add("LAST_UPDATED"); validSortColumns.add("EXTERNAL_EXECUTION_ID"); validSortColumns.add("PARENT_EXECUTION_ID"); } private final NamedParameterJdbcTemplate jdbcTemplate; private String tablePrefix = TaskProperties.DEFAULT_TABLE_PREFIX; private DataSource dataSource; private LinkedHashMap orderMap; private DataFieldMaxValueIncrementer taskIncrementer; /** * Initializes the JdbcTaskExecutionDao. * @param dataSource used by the dao to execute queries and update the tables. * @param tablePrefix the table prefix to use for this dao. */ public JdbcTaskExecutionDao(DataSource dataSource, String tablePrefix) { this(dataSource); Assert.hasText(tablePrefix, "tablePrefix must not be null nor empty"); this.tablePrefix = tablePrefix; } /** * Initializes the JdbTaskExecutionDao and defaults the table prefix to * {@link TaskProperties#DEFAULT_TABLE_PREFIX}. * @param dataSource used by the dao to execute queries and update the tables. 
*/ public JdbcTaskExecutionDao(DataSource dataSource) { Assert.notNull(dataSource, "The dataSource must not be null."); this.jdbcTemplate = new NamedParameterJdbcTemplate(dataSource); this.dataSource = dataSource; this.orderMap = new LinkedHashMap<>(); this.orderMap.put("START_TIME", Order.DESCENDING); this.orderMap.put("TASK_EXECUTION_ID", Order.DESCENDING); } @Override public TaskExecution createTaskExecution(String taskName, LocalDateTime startTime, List arguments, String externalExecutionId) { return createTaskExecution(taskName, startTime, arguments, externalExecutionId, null); } @Override public TaskExecution createTaskExecution(String taskName, LocalDateTime startTime, List arguments, String externalExecutionId, Long parentExecutionId) { long nextExecutionId = getNextExecutionId(); TaskExecution taskExecution = new TaskExecution(nextExecutionId, null, taskName, startTime, null, null, arguments, null, externalExecutionId, parentExecutionId); final MapSqlParameterSource queryParameters = new MapSqlParameterSource() .addValue("taskExecutionId", nextExecutionId, Types.BIGINT) .addValue("exitCode", null, Types.INTEGER) .addValue("startTime", startTime == null ? 
null : Timestamp.valueOf(startTime), Types.TIMESTAMP) .addValue("taskName", taskName, Types.VARCHAR) .addValue("lastUpdated", Timestamp.valueOf(LocalDateTime.now()), Types.TIMESTAMP) .addValue("externalExecutionId", externalExecutionId, Types.VARCHAR) .addValue("parentExecutionId", parentExecutionId, Types.BIGINT); this.jdbcTemplate.update(getQuery(SAVE_TASK_EXECUTION), queryParameters); insertTaskArguments(nextExecutionId, arguments); return taskExecution; } @Override public TaskExecution startTaskExecution(long executionId, String taskName, LocalDateTime startTime, List arguments, String externalExecutionId) { return startTaskExecution(executionId, taskName, startTime, arguments, externalExecutionId, null); } @Override public TaskExecution startTaskExecution(long executionId, String taskName, LocalDateTime startTime, List arguments, String externalExecutionId, Long parentExecutionId) { TaskExecution taskExecution = new TaskExecution(executionId, null, taskName, startTime, null, null, arguments, null, externalExecutionId, parentExecutionId); final MapSqlParameterSource queryParameters = new MapSqlParameterSource() .addValue("startTime", startTime == null ? 
null : Timestamp.valueOf(startTime), Types.TIMESTAMP) .addValue("exitCode", null, Types.INTEGER) .addValue("taskName", taskName, Types.VARCHAR) .addValue("lastUpdated", Timestamp.valueOf(LocalDateTime.now()), Types.TIMESTAMP) .addValue("parentExecutionId", parentExecutionId, Types.BIGINT) .addValue("taskExecutionId", executionId, Types.BIGINT); String updateString = START_TASK_EXECUTION_PREFIX; if (externalExecutionId == null) { updateString += START_TASK_EXECUTION_SUFFIX; } else { updateString += START_TASK_EXECUTION_EXTERNAL_ID_SUFFIX; queryParameters.addValue("externalExecutionId", externalExecutionId, Types.VARCHAR); } this.jdbcTemplate.update(getQuery(updateString), queryParameters); insertTaskArguments(executionId, arguments); return taskExecution; } @Override public void completeTaskExecution(long taskExecutionId, Integer exitCode, LocalDateTime endTime, String exitMessage, String errorMessage) { final MapSqlParameterSource queryParameters = new MapSqlParameterSource().addValue("taskExecutionId", taskExecutionId, Types.BIGINT); // Check if given TaskExecution's Id already exists, if none is found // it is invalid and an exception should be thrown. if (this.jdbcTemplate.queryForObject(getQuery(CHECK_TASK_EXECUTION_EXISTS), queryParameters, Integer.class) != 1) { throw new IllegalStateException("Invalid TaskExecution, ID " + taskExecutionId + " not found."); } final MapSqlParameterSource parameters = new MapSqlParameterSource() .addValue("endTime", endTime == null ? 
null : Timestamp.valueOf(endTime), Types.TIMESTAMP) .addValue("exitCode", exitCode, Types.INTEGER) .addValue("exitMessage", exitMessage, Types.VARCHAR) .addValue("errorMessage", errorMessage, Types.VARCHAR) .addValue("lastUpdated", Timestamp.valueOf(LocalDateTime.now()), Types.TIMESTAMP) .addValue("taskExecutionId", taskExecutionId, Types.BIGINT); this.jdbcTemplate.update(getQuery(UPDATE_TASK_EXECUTION), parameters); } @Override public void completeTaskExecution(long taskExecutionId, Integer exitCode, LocalDateTime endTime, String exitMessage) { completeTaskExecution(taskExecutionId, exitCode, endTime, exitMessage, null); } @Override public TaskExecution getTaskExecution(long executionId) { final MapSqlParameterSource queryParameters = new MapSqlParameterSource().addValue("taskExecutionId", executionId, Types.BIGINT); try { TaskExecution taskExecution = this.jdbcTemplate.queryForObject(getQuery(GET_EXECUTION_BY_ID), queryParameters, new TaskExecutionRowMapper()); taskExecution.setArguments(getTaskArguments(executionId)); return taskExecution; } catch (EmptyResultDataAccessException e) { return null; } } @Override public long getTaskExecutionCountByTaskName(String taskName) { final MapSqlParameterSource queryParameters = new MapSqlParameterSource().addValue("taskName", taskName, Types.VARCHAR); try { return this.jdbcTemplate.queryForObject(getQuery(TASK_EXECUTION_COUNT_BY_NAME), queryParameters, Long.class); } catch (EmptyResultDataAccessException e) { return 0; } } @Override public long getRunningTaskExecutionCountByTaskName(String taskName) { final MapSqlParameterSource queryParameters = new MapSqlParameterSource().addValue("taskName", taskName, Types.VARCHAR); try { return this.jdbcTemplate.queryForObject(getQuery(RUNNING_TASK_EXECUTION_COUNT_BY_NAME), queryParameters, Long.class); } catch (EmptyResultDataAccessException e) { return 0; } } @Override public long getRunningTaskExecutionCount() { try { final MapSqlParameterSource queryParameters = new 
MapSqlParameterSource(); return this.jdbcTemplate.queryForObject(getQuery(RUNNING_TASK_EXECUTION_COUNT), queryParameters, Long.class); } catch (EmptyResultDataAccessException e) { return 0; } } @Override public List getLatestTaskExecutionsByTaskNames(String... taskNames) { Assert.notEmpty(taskNames, "At least 1 task name must be provided."); final List taskNamesAsList = new ArrayList<>(); for (String taskName : taskNames) { if (StringUtils.hasText(taskName)) { taskNamesAsList.add(taskName); } } Assert.isTrue(taskNamesAsList.size() == taskNames.length, String.format("Task names must not contain any empty elements but %s of %s were empty or null.", taskNames.length - taskNamesAsList.size(), taskNames.length)); try { final Map> paramMap = Collections.singletonMap("taskNames", taskNamesAsList); return this.jdbcTemplate.query(getQuery(LAST_TASK_EXECUTIONS_BY_TASK_NAMES), paramMap, new TaskExecutionRowMapper()); } catch (EmptyResultDataAccessException e) { return Collections.emptyList(); } } @Override public TaskExecution getLatestTaskExecutionForTaskName(String taskName) { Assert.hasText(taskName, "The task name must not be empty."); final List taskExecutions = this.getLatestTaskExecutionsByTaskNames(taskName); if (taskExecutions.isEmpty()) { return null; } else if (taskExecutions.size() == 1) { return taskExecutions.get(0); } else { throw new IllegalStateException( "Only expected a single TaskExecution but received " + taskExecutions.size()); } } @Override public long getTaskExecutionCount() { try { return this.jdbcTemplate.queryForObject(getQuery(TASK_EXECUTION_COUNT), new MapSqlParameterSource(), Long.class); } catch (EmptyResultDataAccessException e) { return 0; } } @Override public Page findRunningTaskExecutions(String taskName, Pageable pageable) { return queryForPageableResults(pageable, SELECT_CLAUSE, FROM_CLAUSE, RUNNING_TASK_WHERE_CLAUSE, new MapSqlParameterSource("taskName", taskName), getRunningTaskExecutionCountByTaskName(taskName)); } @Override public Page 
findTaskExecutionsByExternalExecutionId(String externalExecutionId, Pageable pageable) { return queryForPageableResults(pageable, SELECT_CLAUSE, FROM_CLAUSE, EXTERNAL_EXECUTION_ID_WHERE_CLAUSE, new MapSqlParameterSource("externalExecutionId", externalExecutionId), getTaskExecutionCountByExternalExecutionId(externalExecutionId)); } @Override public long getTaskExecutionCountByExternalExecutionId(String externalExecutionId) { final MapSqlParameterSource queryParameters = new MapSqlParameterSource().addValue("externalExecutionId", externalExecutionId, Types.VARCHAR); try { return this.jdbcTemplate.queryForObject(getQuery(TASK_EXECUTION_COUNT_BY_EXTERNAL_EXECUTION_ID), queryParameters, Long.class); } catch (EmptyResultDataAccessException e) { return 0; } } @Override public Page findTaskExecutionsByName(String taskName, Pageable pageable) { return queryForPageableResults(pageable, SELECT_CLAUSE, FROM_CLAUSE, TASK_NAME_WHERE_CLAUSE, new MapSqlParameterSource("taskName", taskName), getTaskExecutionCountByTaskName(taskName)); } @Override public List getTaskNames() { return this.jdbcTemplate.queryForList(getQuery(FIND_TASK_NAMES), new MapSqlParameterSource(), String.class); } @Override public Page findAll(Pageable pageable) { return queryForPageableResults(pageable, SELECT_CLAUSE, FROM_CLAUSE, null, new MapSqlParameterSource(), getTaskExecutionCount()); } public void setTaskIncrementer(DataFieldMaxValueIncrementer taskIncrementer) { this.taskIncrementer = taskIncrementer; } public long getNextExecutionId() { return this.taskIncrementer.nextLongValue(); } @Override public Long getTaskExecutionIdByJobExecutionId(long jobExecutionId) { final MapSqlParameterSource queryParameters = new MapSqlParameterSource().addValue("jobExecutionId", jobExecutionId, Types.BIGINT); try { return this.jdbcTemplate.queryForObject(getQuery(FIND_TASK_EXECUTION_BY_JOB_EXECUTION_ID), queryParameters, Long.class); } catch (EmptyResultDataAccessException e) { return null; } } @Override public Set 
getJobExecutionIdsByTaskExecutionId(long taskExecutionId) { final MapSqlParameterSource queryParameters = new MapSqlParameterSource().addValue("taskExecutionId", taskExecutionId, Types.BIGINT); try { return this.jdbcTemplate.query(getQuery(FIND_JOB_EXECUTION_BY_TASK_EXECUTION_ID), queryParameters, new ResultSetExtractor>() { @Override public Set extractData(ResultSet resultSet) throws SQLException, DataAccessException { Set jobExecutionIds = new TreeSet<>(); while (resultSet.next()) { jobExecutionIds.add(resultSet.getLong("JOB_EXECUTION_ID")); } return jobExecutionIds; } }); } catch (DataAccessException e) { return Collections.emptySet(); } } @Override public void updateExternalExecutionId(long taskExecutionId, String externalExecutionId) { final MapSqlParameterSource queryParameters = new MapSqlParameterSource() .addValue("externalExecutionId", externalExecutionId, Types.VARCHAR) .addValue("taskExecutionId", taskExecutionId, Types.BIGINT); if (this.jdbcTemplate.update(getQuery(UPDATE_TASK_EXECUTION_EXTERNAL_EXECUTION_ID), queryParameters) != 1) { throw new IllegalStateException("Invalid TaskExecution, ID " + taskExecutionId + " not found."); } } private Page queryForPageableResults(Pageable pageable, String selectClause, String fromClause, String whereClause, MapSqlParameterSource queryParameters, long totalCount) { SqlPagingQueryProviderFactoryBean factoryBean = new SqlPagingQueryProviderFactoryBean(); factoryBean.setSelectClause(selectClause); factoryBean.setFromClause(fromClause); if (StringUtils.hasText(whereClause)) { factoryBean.setWhereClause(whereClause); } final Sort sort = pageable.getSort(); final LinkedHashMap sortOrderMap = new LinkedHashMap<>(); if (sort != null) { for (Sort.Order sortOrder : sort) { if (validSortColumns.contains(sortOrder.getProperty().toUpperCase(Locale.ROOT))) { sortOrderMap.put(sortOrder.getProperty(), sortOrder.isAscending() ? 
Order.ASCENDING : Order.DESCENDING); } else { throw new IllegalArgumentException( String.format("Invalid sort option selected: %s", sortOrder.getProperty())); } } } if (!CollectionUtils.isEmpty(sortOrderMap)) { factoryBean.setSortKeys(sortOrderMap); } else { factoryBean.setSortKeys(this.orderMap); } factoryBean.setDataSource(this.dataSource); PagingQueryProvider pagingQueryProvider; try { pagingQueryProvider = factoryBean.getObject(); pagingQueryProvider.init(this.dataSource); } catch (Exception e) { throw new IllegalStateException(e); } String query = pagingQueryProvider.getPageQuery(pageable); List resultList = this.jdbcTemplate.query(getQuery(query), queryParameters, new TaskExecutionRowMapper()); return new PageImpl<>(resultList, pageable, totalCount); } private String getQuery(String base) { return StringUtils.replace(base, "%PREFIX%", this.tablePrefix); } /** * Convenience method that inserts all arguments from the provided task arguments. * @param executionId The executionId to which the arguments are associated. * @param taskArguments The arguments to be stored. */ private void insertTaskArguments(long executionId, List taskArguments) { for (String args : taskArguments) { insertArgument(executionId, args); } } /** * Convenience method that inserts an individual records into the * TASK_EXECUTION_PARAMS table. 
* @param taskExecutionId id of a task execution * @param taskParam task parameters */ private void insertArgument(long taskExecutionId, String taskParam) { final MapSqlParameterSource queryParameters = new MapSqlParameterSource() .addValue("taskExecutionId", taskExecutionId, Types.BIGINT) .addValue("taskParam", taskParam, Types.VARCHAR); this.jdbcTemplate.update(getQuery(CREATE_TASK_ARGUMENT), queryParameters); } private List getTaskArguments(long taskExecutionId) { final List params = new ArrayList<>(); RowCallbackHandler handler = new RowCallbackHandler() { @Override public void processRow(ResultSet rs) throws SQLException { params.add(rs.getString(2)); } }; this.jdbcTemplate.query(getQuery(FIND_ARGUMENT_FROM_ID), new MapSqlParameterSource("taskExecutionId", taskExecutionId), handler); return params; } /** * Re-usable mapper for {@link TaskExecution} instances. * */ private final class TaskExecutionRowMapper implements RowMapper { private TaskExecutionRowMapper() { } @Override public TaskExecution mapRow(ResultSet rs, int rowNum) throws SQLException { long id = rs.getLong("TASK_EXECUTION_ID"); Long parentExecutionId = rs.getLong("PARENT_EXECUTION_ID"); if (rs.wasNull()) { parentExecutionId = null; } LocalDateTime startTime = null; LocalDateTime endTime = null; try { startTime = rs.getObject("START_TIME", LocalDateTime.class); } catch (NullPointerException npe) { if (!npe.getMessage().contains("")) { throw npe; } } try { endTime = rs.getObject("END_TIME", LocalDateTime.class); } catch (NullPointerException npe) { if (!npe.getMessage().contains("")) { throw npe; } } return new TaskExecution(id, getNullableExitCode(rs), rs.getString("TASK_NAME"), startTime, endTime, rs.getString("EXIT_MESSAGE"), getTaskArguments(id), rs.getString("ERROR_MESSAGE"), rs.getString("EXTERNAL_EXECUTION_ID"), parentExecutionId); } private Integer getNullableExitCode(ResultSet rs) throws SQLException { int exitCode = rs.getInt("EXIT_CODE"); return !rs.wasNull() ? 
exitCode : null; } } } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/repository/dao/MapTaskExecutionDao.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.repository.dao; import java.io.Serializable; import java.time.LocalDateTime; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeSet; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.atomic.AtomicLong; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.data.domain.Page; import org.springframework.data.domain.PageImpl; import org.springframework.data.domain.Pageable; import org.springframework.util.Assert; import org.springframework.util.StringUtils; /** * Stores Task Execution Information to a in-memory map. 
* * @author Glenn Renfro * @author Gunnar Hillert * @author David Turanski */ public class MapTaskExecutionDao implements TaskExecutionDao { private final AtomicLong currentId = new AtomicLong(0L); private ConcurrentMap taskExecutions; private ConcurrentMap> batchJobAssociations; public MapTaskExecutionDao() { this.taskExecutions = new ConcurrentHashMap<>(); this.batchJobAssociations = new ConcurrentHashMap<>(); } @Override public TaskExecution createTaskExecution(String taskName, LocalDateTime startTime, List arguments, String externalExecutionId) { return createTaskExecution(taskName, startTime, arguments, externalExecutionId, null); } @Override public TaskExecution createTaskExecution(String taskName, LocalDateTime startTime, List arguments, String externalExecutionId, Long parentExecutionId) { long taskExecutionId = getNextExecutionId(); TaskExecution taskExecution = new TaskExecution(taskExecutionId, null, taskName, startTime, null, null, arguments, null, externalExecutionId, parentExecutionId); this.taskExecutions.put(taskExecutionId, taskExecution); return taskExecution; } @Override public TaskExecution startTaskExecution(long executionId, String taskName, LocalDateTime startTime, List arguments, String externalExecutionid) { return startTaskExecution(executionId, taskName, startTime, arguments, externalExecutionid, null); } @Override public TaskExecution startTaskExecution(long executionId, String taskName, LocalDateTime startTime, List arguments, String externalExecutionid, Long parentExecutionId) { TaskExecution taskExecution = this.taskExecutions.get(executionId); taskExecution.setTaskName(taskName); taskExecution.setStartTime(startTime); taskExecution.setArguments(arguments); taskExecution.setParentExecutionId(parentExecutionId); if (externalExecutionid != null) { taskExecution.setExternalExecutionId(externalExecutionid); } return taskExecution; } @Override public void completeTaskExecution(long executionId, Integer exitCode, LocalDateTime endTime, 
String exitMessage, String errorMessage) { if (!this.taskExecutions.containsKey(executionId)) { throw new IllegalStateException("Invalid TaskExecution, ID " + executionId + " not found."); } TaskExecution taskExecution = this.taskExecutions.get(executionId); taskExecution.setEndTime(endTime); taskExecution.setExitCode(exitCode); taskExecution.setExitMessage(exitMessage); taskExecution.setErrorMessage(errorMessage); } @Override public void completeTaskExecution(long executionId, Integer exitCode, LocalDateTime endTime, String exitMessage) { completeTaskExecution(executionId, exitCode, endTime, exitMessage, null); } @Override public TaskExecution getTaskExecution(long executionId) { return this.taskExecutions.get(executionId); } @Override public long getTaskExecutionCountByTaskName(String taskName) { int count = 0; for (Map.Entry entry : this.taskExecutions.entrySet()) { if (entry.getValue().getTaskName().equals(taskName)) { count++; } } return count; } @Override public long getTaskExecutionCountByExternalExecutionId(String externalExecutionId) { int count = 0; for (Map.Entry entry : this.taskExecutions.entrySet()) { if (entry.getValue().getExternalExecutionId().equals(externalExecutionId)) { count++; } } return count; } @Override public long getRunningTaskExecutionCountByTaskName(String taskName) { int count = 0; for (Map.Entry entry : this.taskExecutions.entrySet()) { if (entry.getValue().getTaskName().equals(taskName) && entry.getValue().getEndTime() == null) { count++; } } return count; } @Override public long getRunningTaskExecutionCount() { long count = 0; for (Map.Entry entry : this.taskExecutions.entrySet()) { if (entry.getValue().getEndTime() == null) { count++; } } return count; } @Override public long getTaskExecutionCount() { return this.taskExecutions.size(); } @Override public Page findRunningTaskExecutions(String taskName, Pageable pageable) { Set result = getTaskExecutionTreeSet(); for (Map.Entry entry : this.taskExecutions.entrySet()) { if 
(entry.getValue().getTaskName().equals(taskName) && entry.getValue().getEndTime() == null) { result.add(entry.getValue()); } } return getPageFromList(new ArrayList<>(result), pageable, getRunningTaskExecutionCountByTaskName(taskName)); } @Override public Page findTaskExecutionsByExternalExecutionId(String externalExecutionId, Pageable pageable) { Set result = getTaskExecutionTreeSet(); for (Map.Entry entry : this.taskExecutions.entrySet()) { if (entry.getValue().getExternalExecutionId().equals(externalExecutionId)) { result.add(entry.getValue()); } } return getPageFromList(new ArrayList<>(result), pageable, getTaskExecutionCountByExternalExecutionId(externalExecutionId)); } @Override public Page findTaskExecutionsByName(String taskName, Pageable pageable) { Set filteredSet = getTaskExecutionTreeSet(); for (Map.Entry entry : this.taskExecutions.entrySet()) { if (entry.getValue().getTaskName().equals(taskName)) { filteredSet.add(entry.getValue()); } } return getPageFromList(new ArrayList<>(filteredSet), pageable, getTaskExecutionCountByTaskName(taskName)); } @Override public List getTaskNames() { Set result = new TreeSet<>(); for (Map.Entry entry : this.taskExecutions.entrySet()) { result.add(entry.getValue().getTaskName()); } return new ArrayList<>(result); } @Override public Page findAll(Pageable pageable) { TreeSet sortedSet = getTaskExecutionTreeSet(); sortedSet.addAll(this.taskExecutions.values()); List result = new ArrayList<>(sortedSet.descendingSet()); return getPageFromList(result, pageable, getTaskExecutionCount()); } public Map getTaskExecutions() { return Collections.unmodifiableMap(this.taskExecutions); } public long getNextExecutionId() { return this.currentId.getAndIncrement(); } @Override public Long getTaskExecutionIdByJobExecutionId(long jobExecutionId) { Long taskId = null; found: for (Map.Entry> association : this.batchJobAssociations.entrySet()) { for (Long curJobExecutionId : association.getValue()) { if 
(curJobExecutionId.equals(jobExecutionId)) { taskId = association.getKey(); break found; } } } return taskId; } @Override public Set getJobExecutionIdsByTaskExecutionId(long taskExecutionId) { if (this.batchJobAssociations.containsKey(taskExecutionId)) { return Collections.unmodifiableSet(this.batchJobAssociations.get(taskExecutionId)); } else { return new TreeSet<>(); } } @Override public void updateExternalExecutionId(long taskExecutionId, String externalExecutionId) { TaskExecution taskExecution = this.taskExecutions.get(taskExecutionId); Assert.notNull(taskExecution, "Invalid TaskExecution, ID " + taskExecutionId + " not found."); taskExecution.setExternalExecutionId(externalExecutionId); } public ConcurrentMap> getBatchJobAssociations() { return this.batchJobAssociations; } private TreeSet getTaskExecutionTreeSet() { return new TreeSet<>(new Comparator() { @Override public int compare(TaskExecution e1, TaskExecution e2) { int result = e1.getStartTime().compareTo(e2.getStartTime()); if (result == 0) { result = Long.valueOf(e1.getExecutionId()).compareTo(e2.getExecutionId()); } return result; } }); } private Page getPageFromList(List executionList, Pageable pageable, long maxSize) { long toIndex = (pageable.getOffset() + pageable.getPageSize() > executionList.size()) ? executionList.size() : pageable.getOffset() + pageable.getPageSize(); return new PageImpl<>(executionList.subList((int) pageable.getOffset(), (int) toIndex), pageable, maxSize); } @Override public List getLatestTaskExecutionsByTaskNames(String... 
taskNames) { Assert.notEmpty(taskNames, "At least 1 task name must be provided."); final List taskNamesAsList = new ArrayList<>(); for (String taskName : taskNames) { if (StringUtils.hasText(taskName)) { taskNamesAsList.add(taskName); } } Assert.isTrue(taskNamesAsList.size() == taskNames.length, String.format("Task names must not contain any empty elements but %s of %s were empty or null.", taskNames.length - taskNamesAsList.size(), taskNames.length)); final Map tempTaskExecutions = new HashMap<>(); for (Map.Entry taskExecutionMapEntry : this.taskExecutions.entrySet()) { if (!taskNamesAsList.contains(taskExecutionMapEntry.getValue().getTaskName())) { continue; } final TaskExecution tempTaskExecution = tempTaskExecutions .get(taskExecutionMapEntry.getValue().getTaskName()); if (tempTaskExecution == null || tempTaskExecution.getStartTime().isBefore(taskExecutionMapEntry.getValue().getStartTime()) || (tempTaskExecution.getStartTime().equals(taskExecutionMapEntry.getValue().getStartTime()) && tempTaskExecution.getExecutionId() < taskExecutionMapEntry.getValue() .getExecutionId())) { tempTaskExecutions.put(taskExecutionMapEntry.getValue().getTaskName(), taskExecutionMapEntry.getValue()); } } final List latestTaskExecutions = new ArrayList<>(tempTaskExecutions.values()); Collections.sort(latestTaskExecutions, new TaskExecutionComparator()); return latestTaskExecutions; } @Override public TaskExecution getLatestTaskExecutionForTaskName(String taskName) { Assert.hasText(taskName, "The task name must not be empty."); final List taskExecutions = this.getLatestTaskExecutionsByTaskNames(taskName); if (taskExecutions.isEmpty()) { return null; } else if (taskExecutions.size() == 1) { return taskExecutions.get(0); } else { throw new IllegalStateException( "Only expected a single TaskExecution but received " + taskExecutions.size()); } } private static final class TaskExecutionComparator implements Comparator, Serializable { @Override public int compare(TaskExecution 
firstTaskExecution, TaskExecution secondTaskExecution) { if (firstTaskExecution.getStartTime().equals(secondTaskExecution.getStartTime())) { return Long.compare(firstTaskExecution.getExecutionId(), secondTaskExecution.getExecutionId()); } else { return secondTaskExecution.getStartTime().compareTo(firstTaskExecution.getStartTime()); } } } } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/repository/dao/TaskExecutionDao.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.repository.dao; import java.time.LocalDateTime; import java.util.List; import java.util.Set; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; /** * Data Access Object for task executions. * * @author Glenn Renfro * @author Gunnar Hillert * @author David Turanski * */ public interface TaskExecutionDao { /** * Save a new {@link TaskExecution}. * @param taskName the name that associated with the task execution. * @param startTime the time task began. * @param arguments list of key/value pairs that configure the task. * @param externalExecutionId id assigned to the task by the platform * @return A fully qualified {@link TaskExecution} instance. 
*/ TaskExecution createTaskExecution(String taskName, LocalDateTime startTime, List arguments, String externalExecutionId); /** * Save a new {@link TaskExecution}. * @param taskName the name that associated with the task execution. * @param startTime the time task began. * @param arguments list of key/value pairs that configure the task. * @param externalExecutionId id assigned to the task by the platform * @param parentExecutionId the parent task execution id. * @return A fully qualified {@link TaskExecution} instance. * @since 1.2.0 */ TaskExecution createTaskExecution(String taskName, LocalDateTime startTime, List arguments, String externalExecutionId, Long parentExecutionId); /** * Update and existing {@link TaskExecution} to mark it as started. * @param executionId the id of the taskExecution to be updated. * @param taskName the name that associated with the task execution. * @param startTime the time task began. * @param arguments list of key/value pairs that configure the task. * @param externalExecutionId id assigned to the task by the platform * @return A TaskExecution containing the information available at task execution * start. * @since 1.1.0 */ TaskExecution startTaskExecution(long executionId, String taskName, LocalDateTime startTime, List arguments, String externalExecutionId); /** * Update and existing {@link TaskExecution} to mark it as started. * @param executionId the id of the taskExecution to be updated. * @param taskName the name that associated with the task execution. * @param startTime the time task began. * @param arguments list of key/value pairs that configure the task. * @param externalExecutionId id assigned to the task by the platform * @param parentExecutionId the parent task execution id. * @return A TaskExecution containing the information available at task execution * start. 
* @since 1.2.0 */ TaskExecution startTaskExecution(long executionId, String taskName, LocalDateTime startTime, List arguments, String externalExecutionId, Long parentExecutionId); /** * Update and existing {@link TaskExecution} to mark it as completed. * @param executionId the id of the taskExecution to be updated. * @param exitCode the status of the task upon completion. * @param endTime the time the task completed. * @param exitMessage the message assigned to the task upon completion. * @param errorMessage error information available upon failure of a task. * @since 1.1.0 */ void completeTaskExecution(long executionId, Integer exitCode, LocalDateTime endTime, String exitMessage, String errorMessage); /** * Update and existing {@link TaskExecution}. * @param executionId the id of the taskExecution to be updated. * @param exitCode the status of the task upon completion. * @param endTime the time the task completed. * @param exitMessage the message assigned to the task upon completion. */ void completeTaskExecution(long executionId, Integer exitCode, LocalDateTime endTime, String exitMessage); /** * Retrieves a task execution from the task repository. * @param executionId the id associated with the task execution. * @return a fully qualified TaskExecution instance. */ TaskExecution getTaskExecution(long executionId); /** * Retrieves current number of task executions for a taskName. * @param taskName the name of the task to search for in the repository. * @return current number of task executions for the taskName. */ long getTaskExecutionCountByTaskName(String taskName); /** * Retrieves current number of task executions for a taskName and with an endTime of * null. * @param taskName the name of the task to search for in the repository. * @return current number of task executions for the taskName. */ long getRunningTaskExecutionCountByTaskName(String taskName); /** * Retrieves current number of task executions with an endTime of null. 
* @return current number of task executions. */ long getRunningTaskExecutionCount(); /** * Retrieves current number of task executions. * @return current number of task executions. */ long getTaskExecutionCount(); /** * Retrieves a set of task executions that are running for a taskName. * @param taskName the name of the task to search for in the repository. * @param pageable the constraints for the search. * @return set of running task executions. */ Page findRunningTaskExecutions(String taskName, Pageable pageable); /** * Retrieve a collection of taskExecutions that contain the provided external * execution id. * @param externalExecutionId the external execution id of the tasks * @param pageable the constraints for the search * @return the set of task executions for tasks with the externalExecutionId */ Page findTaskExecutionsByExternalExecutionId(String externalExecutionId, Pageable pageable); /** * Retrieves current number of task executions for a externalTaskExecutionId. * @param externalExecutionId the external execution id of the task to search for in * the repository. * @return current number of task executions for the externalExecutionId. */ long getTaskExecutionCountByExternalExecutionId(String externalExecutionId); /** * Retrieves a subset of task executions by task name, start location and size. * @param taskName the name of the task to search for in the repository. * @param pageable the constraints for the search. * @return a list that contains task executions from the query bound by the start * position and count specified by the user. */ Page findTaskExecutionsByName(String taskName, Pageable pageable); /** * Retrieves a sorted list of distinct task names for the task executions. * @return a list of distinct task names from the task repository.. */ List getTaskNames(); /** * Retrieves all the task executions within the pageable constraints. 
* @param pageable the constraints for the search * @return page containing the results from the search */ Page findAll(Pageable pageable); /** * Retrieves the next available execution id for a task execution. * @return long containing the executionId. */ long getNextExecutionId(); /** * Returns the id of the TaskExecution that the requested Spring Batch job execution * was executed within the context of. Returns null if non were found. * @param jobExecutionId the id of the JobExecution * @return the id of the {@link TaskExecution} */ Long getTaskExecutionIdByJobExecutionId(long jobExecutionId); /** * Returns the job execution ids associated with a task execution id. * @param taskExecutionId id of the {@link TaskExecution} * @return a Set of the ids of the job executions executed within the * task. */ Set getJobExecutionIdsByTaskExecutionId(long taskExecutionId); /** * Updates the externalExecutionId for the execution id specified. * @param taskExecutionId the execution id for the task to be updated. * @param externalExecutionId the new externalExecutionId. */ void updateExternalExecutionId(long taskExecutionId, String externalExecutionId); /** * Returns a {@link List} of the latest {@link TaskExecution} for 1 or more task * names. * * Latest is defined by the most recent start time. A {@link TaskExecution} does not * have to be finished (The results may including pending {@link TaskExecution}s). * * It is theoretically possible that a {@link TaskExecution} with the same name to * have more than 1 {@link TaskExecution} for the exact same start time. In that case * the {@link TaskExecution} with the highest Task Execution ID is returned. * * This method will not consider end times in its calculations. Thus, when a task * execution {@code A} starts after task execution {@code B} but finishes BEFORE task * execution {@code A}, then task execution {@code B} is being returned. * @param taskNames At least 1 task name must be provided * @return List of TaskExecutions. 
May be empty but never null. */ List getLatestTaskExecutionsByTaskNames(String... taskNames); /** * Returns the latest task execution for a given task name. Will ultimately apply the * same algorithm underneath as {@link #getLatestTaskExecutionsByTaskNames(String...)} * but will only return a single result. * @param taskName Must not be null or empty * @return The latest Task Execution or null * @see #getLatestTaskExecutionsByTaskNames(String...) */ TaskExecution getLatestTaskExecutionForTaskName(String taskName); } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/repository/dao/package-info.java ================================================ /* * Copyright 2016-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Interface DAO and default implementations for storing and retrieving data for tasks * from a repository. */ package org.springframework.cloud.task.repository.dao; ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/repository/database/PagingQueryProvider.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.repository.database; import java.util.Map; import javax.sql.DataSource; import org.springframework.batch.infrastructure.item.database.Order; import org.springframework.data.domain.Pageable; /** * Interface defining the functionality to be provided for generating paging queries. * * @author Glenn Renfro */ public interface PagingQueryProvider { /** * Initialize the query provider using the provided {@link DataSource} if necessary. * @param dataSource DataSource to use for any initialization * @throws Exception throws {@link Exception} if query provider initialize fails. */ void init(DataSource dataSource) throws Exception; /** * The number of parameters that are declared in the query. * @return number of parameters */ int getParameterCount(); /** * Indicate whether the generated queries use named parameter syntax. * @return true if named parameter syntax is used */ boolean isUsingNamedParameters(); /** * The sort keys. A Map of the columns that make up the key and a Boolean indicating * ascending or descending (ascending = true). * @return the sort keys used to order the query */ Map getSortKeys(); /** * * Generate the query that will provide the jump to item query. 
* @param pageable the coordinates to pull the next page from the datasource * @return the generated query */ String getPageQuery(Pageable pageable); } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/repository/database/package-info.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Database-specific components for Spring Cloud Task repository. */ package org.springframework.cloud.task.repository.database; ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/repository/database/support/AbstractSqlPagingQueryProvider.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.springframework.cloud.task.repository.database.support; import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.List; import java.util.Locale; import java.util.Map; import javax.sql.DataSource; import org.springframework.batch.infrastructure.item.database.JdbcParameterUtils; import org.springframework.batch.infrastructure.item.database.Order; import org.springframework.cloud.task.repository.database.PagingQueryProvider; import org.springframework.dao.InvalidDataAccessApiUsageException; import org.springframework.util.Assert; import org.springframework.util.StringUtils; /** * Abstract SQL Paging Query Provider to serve as a base class for all provided SQL paging * query providers. * * Any implementation must provide a way to specify the select clause, from clause and * optionally a where clause. It is recommended that there should be an index for the sort * key to provide better performance. * * Provides properties and preparation for the mandatory "selectClause" and "fromClause" * as well as for the optional "whereClause". 
* * @author Glenn Renfro */ public abstract class AbstractSqlPagingQueryProvider implements PagingQueryProvider { private String selectClause; private String fromClause; private String whereClause; private Map sortKeys = new LinkedHashMap<>(); private int parameterCount; private boolean usingNamedParameters; /** * @return SQL SELECT clause part of SQL query string */ protected String getSelectClause() { return this.selectClause; } /** * @param selectClause SELECT clause part of SQL query string */ public void setSelectClause(String selectClause) { this.selectClause = removeKeyWord("select", selectClause); } /** * @return SQL FROM clause part of SQL query string */ protected String getFromClause() { return this.fromClause; } /** * @param fromClause FROM clause part of SQL query string */ public void setFromClause(String fromClause) { this.fromClause = removeKeyWord("from", fromClause); } /** * @return SQL WHERE clause part of SQL query string */ protected String getWhereClause() { return this.whereClause; } /** * @param whereClause WHERE clause part of SQL query string */ public void setWhereClause(String whereClause) { if (StringUtils.hasText(whereClause)) { this.whereClause = removeKeyWord("where", whereClause); } else { this.whereClause = null; } } /** * A Map<String, Order> of sort columns as the key and {@link Order} for * ascending/descending. 
* @return sortKey key to use to sort and limit page content */ @Override public Map getSortKeys() { return this.sortKeys; } /** * @param sortKeys key to use to sort and limit page content */ public void setSortKeys(Map sortKeys) { this.sortKeys = sortKeys; } @Override public int getParameterCount() { return this.parameterCount; } @Override public boolean isUsingNamedParameters() { return this.usingNamedParameters; } @Override public void init(DataSource dataSource) throws Exception { Assert.notNull(dataSource, "DataSource must not be null"); Assert.hasLength(this.selectClause, "selectClause must be specified"); Assert.hasLength(this.fromClause, "fromClause must be specified"); Assert.notEmpty(this.sortKeys, "sortKey must be specified"); StringBuilder sql = new StringBuilder(); sql.append("SELECT ").append(this.selectClause); sql.append(" FROM ").append(this.fromClause); if (this.whereClause != null) { sql.append(" WHERE ").append(this.whereClause); } List namedParameters = new ArrayList<>(); this.parameterCount = JdbcParameterUtils.countParameterPlaceholders(sql.toString(), namedParameters); if (namedParameters.size() > 0) { if (this.parameterCount != namedParameters.size()) { throw new InvalidDataAccessApiUsageException( "You can't use both named parameters and classic \"?\" placeholders: " + sql); } this.usingNamedParameters = true; } } private String removeKeyWord(String keyWord, String clause) { String temp = clause.trim(); String keyWordString = keyWord + " "; if (temp.toLowerCase(Locale.ROOT).startsWith(keyWordString) && temp.length() > keyWordString.length()) { return temp.substring(keyWordString.length()); } else { return temp; } } } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/repository/database/support/Db2PagingQueryProvider.java ================================================ /* * Copyright 2016-present the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.repository.database.support; import org.springframework.data.domain.Pageable; /** * IBM DB2 implementation of a * {@link org.springframework.cloud.task.repository.database.PagingQueryProvider} using * database specific features. * * @author Thomas Schuettel * @author Ryan DCruz */ public class Db2PagingQueryProvider extends AbstractSqlPagingQueryProvider { @Override public String getPageQuery(Pageable pageable) { long offset = pageable.getOffset() + 1; return generateRowNumSqlQueryWithNesting(getSelectClause(), false, "TMP_ROW_NUM >= " + offset + " AND TMP_ROW_NUM < " + (offset + pageable.getPageSize())); } private String generateRowNumSqlQueryWithNesting(String selectClause, boolean remainingPageQuery, String rowNumClause) { StringBuilder sql = new StringBuilder(); sql.append("SELECT ") .append(selectClause) .append(" FROM (SELECT ") .append(selectClause) .append(", ") .append("ROW_NUMBER() OVER() as TMP_ROW_NUM"); sql.append(" FROM (SELECT ").append(selectClause).append(" FROM ").append(this.getFromClause()); SqlPagingQueryUtils.buildWhereClause(this, remainingPageQuery, sql); sql.append(" ORDER BY ").append(SqlPagingQueryUtils.buildSortClause(this)); sql.append(")) WHERE ").append(rowNumClause); return sql.toString(); } } ================================================ FILE: 
spring-cloud-task-core/src/main/java/org/springframework/cloud/task/repository/database/support/H2PagingQueryProvider.java ================================================ /* * Copyright 2016-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.repository.database.support; import org.springframework.cloud.task.repository.database.PagingQueryProvider; import org.springframework.data.domain.Pageable; /** * H2 implementation of a {@link PagingQueryProvider} using database specific features. * * @author Glenn Renfro * @author Henning Pöttker */ public class H2PagingQueryProvider extends AbstractSqlPagingQueryProvider { @Override public String getPageQuery(Pageable pageable) { String limitClause = new StringBuilder().append("OFFSET ") .append(pageable.getOffset()) .append(" ROWS FETCH NEXT ") .append(pageable.getPageSize()) .append(" ROWS ONLY") .toString(); return SqlPagingQueryUtils.generateLimitJumpToQuery(this, limitClause); } } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/repository/database/support/HsqlPagingQueryProvider.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.repository.database.support; import org.springframework.cloud.task.repository.database.PagingQueryProvider; import org.springframework.data.domain.Pageable; /** * HSQLDB implementation of a {@link PagingQueryProvider} using database specific * features. * * @author Glenn Renfro */ public class HsqlPagingQueryProvider extends AbstractSqlPagingQueryProvider { @Override public String getPageQuery(Pageable pageable) { String topClause = new StringBuilder().append("LIMIT ") .append(pageable.getOffset()) .append(" ") .append(pageable.getPageSize()) .toString(); return SqlPagingQueryUtils.generateTopJumpToQuery(this, topClause); } } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/repository/database/support/MariaDbPagingQueryProvider.java ================================================ /* * Copyright 2022-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.springframework.cloud.task.repository.database.support; import org.springframework.cloud.task.repository.database.PagingQueryProvider; import org.springframework.data.domain.Pageable; /** * MariaDB implementation of a {@link PagingQueryProvider} using database specific * features. * * @author Glenn Renfro */ public class MariaDbPagingQueryProvider extends AbstractSqlPagingQueryProvider { @Override public String getPageQuery(Pageable pageable) { String topClause = new StringBuilder().append("LIMIT ") .append(pageable.getOffset()) .append(", ") .append(pageable.getPageSize()) .toString(); return SqlPagingQueryUtils.generateLimitJumpToQuery(this, topClause); } } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/repository/database/support/MySqlPagingQueryProvider.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.repository.database.support; import org.springframework.cloud.task.repository.database.PagingQueryProvider; import org.springframework.data.domain.Pageable; /** * MySQL implementation of a {@link PagingQueryProvider} using database specific features. 
* * @author Glenn Renfro */ public class MySqlPagingQueryProvider extends AbstractSqlPagingQueryProvider { @Override public String getPageQuery(Pageable pageable) { String topClause = new StringBuilder().append("LIMIT ") .append(pageable.getOffset()) .append(", ") .append(pageable.getPageSize()) .toString(); return SqlPagingQueryUtils.generateLimitJumpToQuery(this, topClause); } } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/repository/database/support/OraclePagingQueryProvider.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.repository.database.support; import org.springframework.cloud.task.repository.database.PagingQueryProvider; import org.springframework.data.domain.Pageable; /** * Oracle implementation of a {@link PagingQueryProvider} using database specific * features. 
* * @author Glenn Renfro */ public class OraclePagingQueryProvider extends AbstractSqlPagingQueryProvider { @Override public String getPageQuery(Pageable pageable) { long offset = pageable.getOffset() + 1; return generateRowNumSqlQueryWithNesting(getSelectClause(), false, "TMP_ROW_NUM >= " + offset + " AND TMP_ROW_NUM < " + (offset + pageable.getPageSize())); } private String generateRowNumSqlQueryWithNesting(String selectClause, boolean remainingPageQuery, String rowNumClause) { StringBuilder sql = new StringBuilder(); sql.append("SELECT ") .append(selectClause) .append(" FROM (SELECT ") .append(selectClause) .append(", ") .append("ROWNUM as TMP_ROW_NUM"); sql.append(" FROM (SELECT ").append(selectClause).append(" FROM ").append(this.getFromClause()); SqlPagingQueryUtils.buildWhereClause(this, remainingPageQuery, sql); sql.append(" ORDER BY ").append(SqlPagingQueryUtils.buildSortClause(this)); sql.append(")) WHERE ").append(rowNumClause); return sql.toString(); } } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/repository/database/support/PostgresPagingQueryProvider.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.springframework.cloud.task.repository.database.support; import org.springframework.cloud.task.repository.database.PagingQueryProvider; import org.springframework.data.domain.Pageable; /** * Postgres implementation of a {@link PagingQueryProvider} using database specific * features. * * @author Glenn Renfro */ public class PostgresPagingQueryProvider extends AbstractSqlPagingQueryProvider { @Override public String getPageQuery(Pageable pageable) { String limitClause = new StringBuilder().append("LIMIT ") .append(pageable.getPageSize()) .append(" OFFSET ") .append(pageable.getOffset()) .toString(); return SqlPagingQueryUtils.generateLimitJumpToQuery(this, limitClause); } } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/repository/database/support/SqlPagingQueryProviderFactoryBean.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.springframework.cloud.task.repository.database.support; import java.util.HashMap; import java.util.Locale; import java.util.Map; import javax.sql.DataSource; import org.springframework.batch.infrastructure.item.database.Order; import org.springframework.beans.factory.FactoryBean; import org.springframework.cloud.task.repository.database.PagingQueryProvider; import org.springframework.cloud.task.repository.support.DatabaseType; import org.springframework.jdbc.support.MetaDataAccessException; import org.springframework.util.Assert; import org.springframework.util.StringUtils; import static org.springframework.cloud.task.repository.support.DatabaseType.DB2; import static org.springframework.cloud.task.repository.support.DatabaseType.DB2AS400; import static org.springframework.cloud.task.repository.support.DatabaseType.DB2VSE; import static org.springframework.cloud.task.repository.support.DatabaseType.DB2ZOS; import static org.springframework.cloud.task.repository.support.DatabaseType.H2; import static org.springframework.cloud.task.repository.support.DatabaseType.HSQL; import static org.springframework.cloud.task.repository.support.DatabaseType.MARIADB; import static org.springframework.cloud.task.repository.support.DatabaseType.MYSQL; import static org.springframework.cloud.task.repository.support.DatabaseType.ORACLE; import static org.springframework.cloud.task.repository.support.DatabaseType.POSTGRES; import static org.springframework.cloud.task.repository.support.DatabaseType.SQLSERVER; /** * Factory bean for {@link PagingQueryProvider} interface. The database type will be * determined from the data source if not provided explicitly. Valid types are given by * the {@link DatabaseType} enum. 
* * @author Glenn Renfro */ public class SqlPagingQueryProviderFactoryBean implements FactoryBean { private DataSource dataSource; private String databaseType; private String fromClause; private String whereClause; private String selectClause; private Map sortKeys; private Map providers = new HashMap<>(); { this.providers.put(HSQL, new HsqlPagingQueryProvider()); this.providers.put(H2, new H2PagingQueryProvider()); this.providers.put(MYSQL, new MySqlPagingQueryProvider()); this.providers.put(MARIADB, new MariaDbPagingQueryProvider()); this.providers.put(POSTGRES, new PostgresPagingQueryProvider()); this.providers.put(ORACLE, new OraclePagingQueryProvider()); this.providers.put(SQLSERVER, new SqlServerPagingQueryProvider()); this.providers.put(DB2, new Db2PagingQueryProvider()); this.providers.put(DB2VSE, new Db2PagingQueryProvider()); this.providers.put(DB2ZOS, new Db2PagingQueryProvider()); this.providers.put(DB2AS400, new Db2PagingQueryProvider()); } /** * @param databaseType the databaseType to set */ public void setDatabaseType(String databaseType) { Assert.hasText(databaseType, "databaseType must not be empty nor null"); this.databaseType = databaseType; } /** * @param dataSource the dataSource to set */ public void setDataSource(DataSource dataSource) { Assert.notNull(dataSource, "dataSource must not be null"); this.dataSource = dataSource; } /** * @param fromClause the fromClause to set */ public void setFromClause(String fromClause) { Assert.hasText(fromClause, "fromClause must not be empty nor null"); this.fromClause = fromClause; } /** * @param whereClause the whereClause to set */ public void setWhereClause(String whereClause) { this.whereClause = whereClause; } /** * @param selectClause the selectClause to set */ public void setSelectClause(String selectClause) { Assert.hasText(selectClause, "selectClause must not be empty nor null"); this.selectClause = selectClause; } /** * @param sortKeys the sortKeys to set */ public void setSortKeys(Map sortKeys) { 
this.sortKeys = sortKeys; } /** * Get a {@link PagingQueryProvider} instance using the provided properties and * appropriate for the given database type. * * @see FactoryBean#getObject() */ @Override public PagingQueryProvider getObject() throws Exception { DatabaseType type; try { type = this.databaseType != null ? DatabaseType.valueOf(this.databaseType.toUpperCase(Locale.ROOT)) : DatabaseType.fromMetaData(this.dataSource); } catch (MetaDataAccessException e) { throw new IllegalArgumentException( "Could not inspect meta data for database type. You have to supply it explicitly.", e); } AbstractSqlPagingQueryProvider provider = this.providers.get(type); Assert.state(provider != null, "Should not happen: missing PagingQueryProvider for DatabaseType=" + type); provider.setFromClause(this.fromClause); provider.setWhereClause(this.whereClause); provider.setSortKeys(this.sortKeys); if (StringUtils.hasText(this.selectClause)) { provider.setSelectClause(this.selectClause); } provider.init(this.dataSource); return provider; } /** * Always returns {@link PagingQueryProvider}. * * @see FactoryBean#getObjectType() */ @Override public Class getObjectType() { return PagingQueryProvider.class; } /** * Always returns true. * * @see FactoryBean#isSingleton() */ @Override public boolean isSingleton() { return true; } } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/repository/database/support/SqlPagingQueryUtils.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.repository.database.support; import java.util.Map; import org.springframework.batch.infrastructure.item.database.Order; /** * Utility class that generates the actual SQL statements used by query providers. * * @author Glenn Renfro */ public final class SqlPagingQueryUtils { private SqlPagingQueryUtils() { } /** * Generate SQL query string using a LIMIT clause. * @param provider {@link AbstractSqlPagingQueryProvider} providing the implementation * specifics * @param limitClause the implementation specific top clause to be used * @return the generated query */ public static String generateLimitJumpToQuery(AbstractSqlPagingQueryProvider provider, String limitClause) { StringBuilder sql = new StringBuilder(); sql.append("SELECT ").append(provider.getSelectClause()); sql.append(" FROM ").append(provider.getFromClause()); sql.append(provider.getWhereClause() == null ? "" : " WHERE " + provider.getWhereClause()); sql.append(" ORDER BY ").append(buildSortClause(provider)); sql.append(" ").append(limitClause); return sql.toString(); } /** * Generate SQL query string using a TOP clause. 
* @param provider {@link AbstractSqlPagingQueryProvider} providing the implementation * specifics * @param topClause the implementation specific top clause to be used * @return the generated query */ public static String generateTopJumpToQuery(AbstractSqlPagingQueryProvider provider, String topClause) { StringBuilder sql = new StringBuilder(); sql.append("SELECT ").append(topClause).append(" ").append(provider.getSelectClause()); sql.append(" FROM ").append(provider.getFromClause()); sql.append(provider.getWhereClause() == null ? "" : " WHERE " + provider.getWhereClause()); sql.append(" ORDER BY ").append(buildSortClause(provider)); return sql.toString(); } /** * Generates WHERE clause for queries that require sub selects. * @param provider the paging query provider that will provide the base where clause * @param remainingPageQuery if true assumes more will be appended to where clause * @param sql the sql statement to be appended. */ public static void buildWhereClause(AbstractSqlPagingQueryProvider provider, boolean remainingPageQuery, StringBuilder sql) { if (remainingPageQuery) { sql.append(" WHERE "); if (provider.getWhereClause() != null) { sql.append("("); sql.append(provider.getWhereClause()); sql.append(") AND "); } } else { sql.append(provider.getWhereClause() == null ? "" : " WHERE " + provider.getWhereClause()); } } /** * Generates ORDER BY attributes based on the sort keys. * @param provider {@link AbstractSqlPagingQueryProvider} providing the implementation * specifics * @return a String that can be appended to an ORDER BY clause. */ public static String buildSortClause(AbstractSqlPagingQueryProvider provider) { return buildSortClause(provider.getSortKeys()); } /** * Generates ORDER BY attributes based on the sort keys. * @param sortKeys generates order by clause from map * @return a String that can be appended to an ORDER BY clause. 
*/ public static String buildSortClause(Map sortKeys) { StringBuilder builder = new StringBuilder(); String prefix = ""; for (Map.Entry sortKey : sortKeys.entrySet()) { builder.append(prefix); prefix = ", "; builder.append(sortKey.getKey()); if (sortKey.getValue() != null && sortKey.getValue() == Order.DESCENDING) { builder.append(" DESC"); } else { builder.append(" ASC"); } } return builder.toString(); } } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/repository/database/support/SqlServerPagingQueryProvider.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.repository.database.support; import org.springframework.cloud.task.repository.database.PagingQueryProvider; import org.springframework.data.domain.Pageable; /** * Sql Server implementation of a {@link PagingQueryProvider} using database specific * features. 
* * @author Glenn Renfro */ public class SqlServerPagingQueryProvider extends AbstractSqlPagingQueryProvider { @Override public String getPageQuery(Pageable pageable) { long offset = pageable.getOffset() + 1; return generateRowNumSqlQueryWithNesting(getSelectClause(), false, "TMP_ROW_NUM >= " + offset + " AND TMP_ROW_NUM < " + (offset + pageable.getPageSize())); } private String generateRowNumSqlQueryWithNesting(String selectClause, boolean remainingPageQuery, String rowNumClause) { StringBuilder sql = new StringBuilder(); sql.append("SELECT ") .append(selectClause) .append(" FROM (SELECT ") .append(selectClause) .append(", ") .append("ROW_NUMBER() OVER (ORDER BY ") .append(SqlPagingQueryUtils.buildSortClause(this)) .append(") AS TMP_ROW_NUM ") .append(" FROM ") .append(getFromClause()); SqlPagingQueryUtils.buildWhereClause(this, remainingPageQuery, sql); sql.append(") TASK_EXECUTION_PAGE "); sql.append(" WHERE ").append(rowNumClause); sql.append(" ORDER BY ").append(SqlPagingQueryUtils.buildSortClause(this)); return sql.toString(); } } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/repository/database/support/package-info.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Support classes for database-specific Spring Cloud Task repository implementations. 
*/ package org.springframework.cloud.task.repository.database.support; ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/repository/package-info.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Core repository interfaces and classes for Spring Cloud Task. */ package org.springframework.cloud.task.repository; ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/repository/support/DatabaseType.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.springframework.cloud.task.repository.support; import java.sql.DatabaseMetaData; import java.sql.SQLException; import java.util.HashMap; import java.util.Map; import javax.sql.DataSource; import org.springframework.jdbc.support.DatabaseMetaDataCallback; import org.springframework.jdbc.support.JdbcUtils; import org.springframework.jdbc.support.MetaDataAccessException; import org.springframework.util.StringUtils; /** * Enum representing a database type, such as DB2 or oracle. The type also contains a * product name, which is expected to be the same as the product name provided by the * database driver's metadata. * * @author Glenn Renfro */ public enum DatabaseType { /** * HSQL DB. */ HSQL("HSQL Database Engine"), /** * H2 DB. */ H2("H2"), /** * Oracle DB. */ ORACLE("Oracle"), /** * MySQL DB. */ MYSQL("MySQL"), /** * MySQL DB. */ MARIADB("MariaDB"), /** * PostgreSQL DB. */ POSTGRES("PostgreSQL"), /** * Microsoft SQL Server DB. */ SQLSERVER("Microsoft SQL Server"), /** * DB2 DB. */ DB2("DB2"), /** * DB2VSE DB. */ DB2VSE("DB2VSE"), /** * DB2ZOS DB. */ DB2ZOS("DB2ZOS"), /** * DB2AS400 DB. */ DB2AS400("DB2AS400"); private static final Map dbNameMap; static { dbNameMap = new HashMap<>(); for (DatabaseType type : values()) { dbNameMap.put(type.getProductName(), type); } } private final String productName; DatabaseType(String productName) { this.productName = productName; } /** * Convenience method that pulls a database product name from the DataSource's * metadata. * @param dataSource the datasource used to extact metadata. * @return DatabaseType The database type associated with the datasource. * @throws MetaDataAccessException thrown if failure occurs on metadata lookup. 
*/ public static DatabaseType fromMetaData(DataSource dataSource) throws SQLException, MetaDataAccessException { String databaseProductName = JdbcUtils.extractDatabaseMetaData(dataSource, new DatabaseMetaDataCallback() { @Override public Object processMetaData(DatabaseMetaData dbmd) throws SQLException, MetaDataAccessException { return dbmd.getDatabaseProductName(); } }).toString(); if (StringUtils.hasText(databaseProductName) && !databaseProductName.equals("DB2/Linux") && databaseProductName.startsWith("DB2")) { String databaseProductVersion = JdbcUtils .extractDatabaseMetaData(dataSource, new DatabaseMetaDataCallback() { @Override public Object processMetaData(DatabaseMetaData dbmd) throws SQLException, MetaDataAccessException { return dbmd.getDatabaseProductVersion(); } }) .toString(); if (databaseProductVersion.startsWith("ARI")) { databaseProductName = "DB2VSE"; } else if (databaseProductVersion.startsWith("DSN")) { databaseProductName = "DB2ZOS"; } else if (databaseProductName.indexOf("AS") != -1 && (databaseProductVersion.startsWith("QSQ") || databaseProductVersion.substring(databaseProductVersion.indexOf('V')) .matches("V\\dR\\d[mM]\\d"))) { databaseProductName = "DB2AS400"; } else { databaseProductName = JdbcUtils.commonDatabaseName(databaseProductName); } } else { if (!databaseProductName.equals(MARIADB.getProductName())) { databaseProductName = JdbcUtils.commonDatabaseName(databaseProductName); } } return fromProductName(databaseProductName); } /** * Static method to obtain a DatabaseType from the provided product name. * @param productName the name of the database. * @return DatabaseType for given product name. * @throws IllegalArgumentException if none is found. 
*/ public static DatabaseType fromProductName(String productName) { if (!dbNameMap.containsKey(productName)) { throw new IllegalArgumentException("DatabaseType not found for product name: [" + productName + "]"); } else { return dbNameMap.get(productName); } } private String getProductName() { return this.productName; } } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/repository/support/SimpleTaskExplorer.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.repository.support; import java.util.List; import java.util.Set; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.cloud.task.repository.TaskExplorer; import org.springframework.cloud.task.repository.dao.TaskExecutionDao; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; import org.springframework.util.Assert; /** * TaskExplorer for that gathers task information from a task repository. 
* * @author Glenn Renfro * @author Michael Minella * @author Gunnar Hillert * @author David Turanski */ public class SimpleTaskExplorer implements TaskExplorer { private TaskExecutionDao taskExecutionDao; public SimpleTaskExplorer(TaskExecutionDaoFactoryBean taskExecutionDaoFactoryBean) { Assert.notNull(taskExecutionDaoFactoryBean, "taskExecutionDaoFactoryBean must not be null"); try { this.taskExecutionDao = taskExecutionDaoFactoryBean.getObject(); } catch (Exception e) { throw new IllegalStateException("Unable to create a TaskExecutionDao", e); } } @Override public TaskExecution getTaskExecution(long executionId) { return this.taskExecutionDao.getTaskExecution(executionId); } @Override public Page findRunningTaskExecutions(String taskName, Pageable pageable) { return this.taskExecutionDao.findRunningTaskExecutions(taskName, pageable); } @Override public Page findTaskExecutionsByExecutionId(String externalExecutionId, Pageable pageable) { return this.taskExecutionDao.findTaskExecutionsByExternalExecutionId(externalExecutionId, pageable); } @Override public List getTaskNames() { return this.taskExecutionDao.getTaskNames(); } @Override public long getTaskExecutionCountByTaskName(String taskName) { return this.taskExecutionDao.getTaskExecutionCountByTaskName(taskName); } @Override public long getTaskExecutionCount() { return this.taskExecutionDao.getTaskExecutionCount(); } @Override public long getRunningTaskExecutionCount() { return this.taskExecutionDao.getRunningTaskExecutionCount(); } @Override public long getTaskExecutionCountByExternalExecutionId(String externalExecutionId) { return this.taskExecutionDao.getTaskExecutionCountByExternalExecutionId(externalExecutionId); } @Override public Page findTaskExecutionsByName(String taskName, Pageable pageable) { return this.taskExecutionDao.findTaskExecutionsByName(taskName, pageable); } @Override public Page findAll(Pageable pageable) { return this.taskExecutionDao.findAll(pageable); } @Override public Long 
getTaskExecutionIdByJobExecutionId(long jobExecutionId) { return this.taskExecutionDao.getTaskExecutionIdByJobExecutionId(jobExecutionId); } @Override public Set getJobExecutionIdsByTaskExecutionId(long taskExecutionId) { return this.taskExecutionDao.getJobExecutionIdsByTaskExecutionId(taskExecutionId); } @Override public List getLatestTaskExecutionsByTaskNames(String... taskNames) { return this.taskExecutionDao.getLatestTaskExecutionsByTaskNames(taskNames); } @Override public TaskExecution getLatestTaskExecutionForTaskName(String taskName) { return this.taskExecutionDao.getLatestTaskExecutionForTaskName(taskName); } } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/repository/support/SimpleTaskNameResolver.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.repository.support; import org.springframework.beans.BeansException; import org.springframework.beans.factory.annotation.Value; import org.springframework.cloud.task.repository.TaskNameResolver; import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContextAware; import org.springframework.util.StringUtils; /** * Simple implementation of the {@link TaskNameResolver} interface. Names the task based * on the following order of precedence: *
 * <ol>
 * <li>A configured property {@code spring.cloud.task.name}</li>
 * <li>The {@link ApplicationContext}'s id.</li>
 * </ol>
* * @author Michael Minella * @see org.springframework.boot.context.ContextIdApplicationContextInitializer */ public class SimpleTaskNameResolver implements TaskNameResolver, ApplicationContextAware { private ApplicationContext context; private String configuredName; @Value("${spring.cloud.task.name:}") public void setConfiguredName(String configuredName) { this.configuredName = configuredName; } @Override public void setApplicationContext(ApplicationContext applicationContext) throws BeansException { this.context = applicationContext; } @Override public String getTaskName() { if (StringUtils.hasText(this.configuredName)) { return this.configuredName; } else { return this.context.getId().replace(":", "_"); } } } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/repository/support/SimpleTaskRepository.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.springframework.cloud.task.repository.support; import java.time.LocalDateTime; import java.util.Collections; import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.springframework.beans.factory.FactoryBean; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.cloud.task.repository.TaskRepository; import org.springframework.cloud.task.repository.dao.TaskExecutionDao; import org.springframework.util.Assert; /** * Records the task execution information to the log and to TaskExecutionDao provided. * * @author Glenn Renfro */ public class SimpleTaskRepository implements TaskRepository { /** * Default max exit message size. */ public static final int MAX_EXIT_MESSAGE_SIZE = 2500; /** * Default max task name size. */ public static final int MAX_TASK_NAME_SIZE = 100; /** * Default max error message size. */ public static final int MAX_ERROR_MESSAGE_SIZE = 2500; private static final Log logger = LogFactory.getLog(SimpleTaskRepository.class); private TaskExecutionDao taskExecutionDao; private FactoryBean taskExecutionDaoFactoryBean; private boolean initialized = false; private int maxExitMessageSize = MAX_EXIT_MESSAGE_SIZE; private int maxTaskNameSize = MAX_TASK_NAME_SIZE; private int maxErrorMessageSize = MAX_ERROR_MESSAGE_SIZE; public SimpleTaskRepository(FactoryBean taskExecutionDaoFactoryBean) { Assert.notNull(taskExecutionDaoFactoryBean, "A FactoryBean that provides a TaskExecutionDao is required"); this.taskExecutionDaoFactoryBean = taskExecutionDaoFactoryBean; } public SimpleTaskRepository(FactoryBean taskExecutionDaoFactoryBean, Integer maxExitMessageSize, Integer maxTaskNameSize, Integer maxErrorMessageSize) { Assert.notNull(taskExecutionDaoFactoryBean, "A FactoryBean that provides a TaskExecutionDao is required"); if (maxTaskNameSize != null) { this.maxTaskNameSize = maxTaskNameSize; } if (maxExitMessageSize != null) { this.maxExitMessageSize = 
maxExitMessageSize; } if (maxErrorMessageSize != null) { this.maxErrorMessageSize = maxErrorMessageSize; } this.taskExecutionDaoFactoryBean = taskExecutionDaoFactoryBean; } @Override public TaskExecution completeTaskExecution(long executionId, Integer exitCode, LocalDateTime endTime, String exitMessage) { return completeTaskExecution(executionId, exitCode, endTime, exitMessage, null); } @Override public TaskExecution completeTaskExecution(long executionId, Integer exitCode, LocalDateTime endTime, String exitMessage, String errorMessage) { initialize(); validateCompletedTaskExitInformation(executionId, exitCode, endTime); exitMessage = trimMessage(exitMessage, this.maxExitMessageSize); errorMessage = trimMessage(errorMessage, this.maxErrorMessageSize); this.taskExecutionDao.completeTaskExecution(executionId, exitCode, endTime, exitMessage, errorMessage); logger.debug("Updating: TaskExecution with executionId=" + executionId + " with the following {" + "exitCode=" + exitCode + ", endTime=" + endTime + ", exitMessage='" + exitMessage + '\'' + ", errorMessage='" + errorMessage + '\'' + '}'); return this.taskExecutionDao.getTaskExecution(executionId); } @Override public TaskExecution createTaskExecution(TaskExecution taskExecution) { initialize(); validateCreateInformation(taskExecution); TaskExecution daoTaskExecution = this.taskExecutionDao.createTaskExecution(taskExecution.getTaskName(), taskExecution.getStartTime(), taskExecution.getArguments(), taskExecution.getExternalExecutionId(), taskExecution.getParentExecutionId()); logger.debug("Creating: " + taskExecution.toString()); return daoTaskExecution; } @Override public TaskExecution createTaskExecution(String name) { initialize(); TaskExecution taskExecution = this.taskExecutionDao.createTaskExecution(name, null, Collections.emptyList(), null); logger.debug("Creating: " + taskExecution.toString()); return taskExecution; } @Override public TaskExecution createTaskExecution() { return createTaskExecution((String) 
null); } @Override public TaskExecution startTaskExecution(long executionid, String taskName, LocalDateTime startTime, List arguments, String externalExecutionId) { return startTaskExecution(executionid, taskName, startTime, arguments, externalExecutionId, null); } @Override public void updateExternalExecutionId(long executionid, String externalExecutionId) { initialize(); this.taskExecutionDao.updateExternalExecutionId(executionid, externalExecutionId); } @Override public TaskExecution startTaskExecution(long executionid, String taskName, LocalDateTime startTime, List arguments, String externalExecutionId, Long parentExecutionId) { initialize(); TaskExecution taskExecution = this.taskExecutionDao.startTaskExecution(executionid, taskName, startTime, arguments, externalExecutionId, parentExecutionId); logger.debug("Starting: " + taskExecution.toString()); return taskExecution; } /** * Retrieves the taskExecutionDao associated with this repository. * @return the taskExecutionDao */ public TaskExecutionDao getTaskExecutionDao() { initialize(); return this.taskExecutionDao; } private void initialize() { if (!this.initialized) { try { this.taskExecutionDao = this.taskExecutionDaoFactoryBean.getObject(); this.initialized = true; } catch (Exception e) { throw new IllegalStateException("Unable to create the TaskExecutionDao", e); } } } /** * Validate startTime and taskName are valid. 
* @param taskExecution task execution to validate */ private void validateCreateInformation(TaskExecution taskExecution) { Assert.notNull(taskExecution.getStartTime(), "TaskExecution start time cannot be null."); if (taskExecution.getTaskName() != null && taskExecution.getTaskName().length() > this.maxTaskNameSize) { throw new IllegalArgumentException("TaskName length exceeds " + this.maxTaskNameSize + " characters"); } } private void validateCompletedTaskExitInformation(long executionId, Integer exitCode, LocalDateTime endTime) { Assert.notNull(exitCode, "exitCode should not be null"); Assert.isTrue(exitCode >= 0, "exit code must be greater than or equal to zero"); Assert.notNull(endTime, "TaskExecution endTime cannot be null."); } private String trimMessage(String exitMessage, int maxSize) { String result = exitMessage; if (exitMessage != null && exitMessage.length() > maxSize) { result = exitMessage.substring(0, maxSize); } return result; } public void setMaxExitMessageSize(int maxExitMessageSize) { this.maxExitMessageSize = maxExitMessageSize; } public void setMaxTaskNameSize(int maxTaskNameSize) { this.maxTaskNameSize = maxTaskNameSize; } public void setMaxErrorMessageSize(int maxErrorMessageSize) { this.maxErrorMessageSize = maxErrorMessageSize; } } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/repository/support/TaskExecutionDaoFactoryBean.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.repository.support; import java.sql.DatabaseMetaData; import java.sql.ResultSet; import java.sql.SQLException; import javax.sql.DataSource; import org.springframework.batch.infrastructure.item.database.support.DataFieldMaxValueIncrementerFactory; import org.springframework.batch.infrastructure.item.database.support.DefaultDataFieldMaxValueIncrementerFactory; import org.springframework.beans.factory.FactoryBean; import org.springframework.cloud.task.configuration.TaskProperties; import org.springframework.cloud.task.listener.TaskException; import org.springframework.cloud.task.repository.dao.JdbcTaskExecutionDao; import org.springframework.cloud.task.repository.dao.MapTaskExecutionDao; import org.springframework.cloud.task.repository.dao.TaskExecutionDao; import org.springframework.jdbc.support.MetaDataAccessException; import org.springframework.jdbc.support.incrementer.DataFieldMaxValueIncrementer; import org.springframework.jdbc.support.incrementer.SqlServerSequenceMaxValueIncrementer; import org.springframework.util.Assert; import org.springframework.util.StringUtils; /** * A {@link FactoryBean} implementation that creates the appropriate * {@link TaskExecutionDao} based on the provided information. * * @author Michael Minella * @author Glenn Renfro */ public class TaskExecutionDaoFactoryBean implements FactoryBean { private DataSource dataSource; private TaskExecutionDao dao = null; private String tablePrefix = TaskProperties.DEFAULT_TABLE_PREFIX; /** * Default constructor will result in a Map based TaskExecutionDao. This is only * intended for testing purposes. */ public TaskExecutionDaoFactoryBean() { } /** * {@link DataSource} to be used. * @param dataSource {@link DataSource} to be used. * @param tablePrefix the table prefix to use for this dao. 
*/ public TaskExecutionDaoFactoryBean(DataSource dataSource, String tablePrefix) { this(dataSource); Assert.hasText(tablePrefix, "tablePrefix must not be null nor empty"); this.tablePrefix = tablePrefix; } /** * {@link DataSource} to be used. * @param dataSource {@link DataSource} to be used. */ public TaskExecutionDaoFactoryBean(DataSource dataSource) { Assert.notNull(dataSource, "A DataSource is required"); this.dataSource = dataSource; } @Override public TaskExecutionDao getObject() throws Exception { if (this.dao == null) { if (this.dataSource != null) { buildTaskExecutionDao(this.dataSource); } else { this.dao = new MapTaskExecutionDao(); } } if (this.dataSource != null) { String databaseType = null; try { databaseType = DatabaseType.fromMetaData(dataSource).name(); } catch (MetaDataAccessException e) { throw new IllegalStateException(e); } if (StringUtils.hasText(databaseType) && databaseType.equals("SQLSERVER")) { String incrementerName = this.tablePrefix + "SEQ"; DataFieldMaxValueIncrementerFactory incrementerFactory = new DefaultDataFieldMaxValueIncrementerFactory( dataSource); DataFieldMaxValueIncrementer incrementer = incrementerFactory.getIncrementer(databaseType, incrementerName); if (!isSqlServerTableSequenceAvailable(incrementerName)) { incrementer = new SqlServerSequenceMaxValueIncrementer(dataSource, this.tablePrefix + "SEQ"); } ((JdbcTaskExecutionDao) this.dao).setTaskIncrementer(incrementer); } } return this.dao; } @Override public Class getObjectType() { return TaskExecutionDao.class; } @Override public boolean isSingleton() { return true; } private void buildTaskExecutionDao(DataSource dataSource) { DataFieldMaxValueIncrementerFactory incrementerFactory = new DefaultDataFieldMaxValueIncrementerFactory( dataSource); this.dao = new JdbcTaskExecutionDao(dataSource, this.tablePrefix); String databaseType; try { databaseType = DatabaseType.fromMetaData(dataSource).name(); } catch (MetaDataAccessException e) { throw new IllegalStateException(e); } 
catch (SQLException e) { throw new IllegalStateException(e); } ((JdbcTaskExecutionDao) this.dao) .setTaskIncrementer(incrementerFactory.getIncrementer(databaseType, this.tablePrefix + "SEQ")); } private boolean isSqlServerTableSequenceAvailable(String incrementerName) { boolean result = false; DatabaseMetaData metaData = null; try { metaData = dataSource.getConnection().getMetaData(); String[] types = { "TABLE" }; ResultSet tables = metaData.getTables(null, null, "%", types); while (tables.next()) { if (tables.getString("TABLE_NAME").equals(incrementerName)) { result = true; break; } } } catch (SQLException sqe) { throw new TaskException(sqe.getMessage()); } return result; } } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/repository/support/TaskRepositoryInitializer.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.springframework.cloud.task.repository.support; import java.sql.SQLException; import java.util.Locale; import javax.sql.DataSource; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.cloud.task.configuration.TaskProperties; import org.springframework.core.io.ResourceLoader; import org.springframework.jdbc.datasource.init.DatabasePopulatorUtils; import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator; import org.springframework.jdbc.support.JdbcUtils; import org.springframework.jdbc.support.MetaDataAccessException; /** * Utility for initializing the Task Repository's datasource. If a single * {@link DataSource} is available in the current context, and functionality is enabled * (as it is by default), this will initialize the database. If more than one DataSource * is available in the current context, custom configuration of this is required (if * desired). * * Initialization of the database can be disabled by configuring the property * spring.cloud.task.initialize-enabled to false. * spring.cloud.task.initialize.enable has been deprecated. * * @author Glenn Renfro * @author Michael Minella */ public final class TaskRepositoryInitializer implements InitializingBean { private static final Log logger = LogFactory.getLog(TaskRepositoryInitializer.class); private static final String DEFAULT_SCHEMA_LOCATION = "classpath:org/springframework/" + "cloud/task/schema-@@platform@@.sql"; /** * Path to the SQL file to use to initialize the database schema. 
*/ private static String schema = DEFAULT_SCHEMA_LOCATION; private DataSource dataSource; private ResourceLoader resourceLoader; @Value("${spring.cloud.task.initialize.enable:true}") private boolean taskInitializationEnabled; private TaskProperties taskProperties; public TaskRepositoryInitializer(TaskProperties taskProperties) { this.taskProperties = taskProperties; } public void setDataSource(DataSource dataSource) { this.dataSource = dataSource; } @Autowired(required = false) public void setResourceLoader(ResourceLoader resourceLoader) { this.resourceLoader = resourceLoader; } private String getDatabaseType(DataSource dataSource) { try { return JdbcUtils.commonDatabaseName(DatabaseType.fromMetaData(dataSource).toString()) .toLowerCase(Locale.ROOT); } catch (MetaDataAccessException ex) { throw new IllegalStateException("Unable to detect database type", ex); } catch (SQLException ex) { throw new IllegalStateException("Unable to detect database type", ex); } } @Override public void afterPropertiesSet() throws Exception { boolean isInitializeEnabled = (this.taskProperties.isInitializeEnabled() != null) ? 
this.taskProperties.isInitializeEnabled() : this.taskInitializationEnabled; if (this.dataSource != null && isInitializeEnabled && this.taskProperties.getTablePrefix().equals(TaskProperties.DEFAULT_TABLE_PREFIX)) { String platform = getDatabaseType(this.dataSource); if ("hsql".equals(platform)) { platform = "hsqldb"; } if ("postgres".equals(platform)) { platform = "postgresql"; } if ("oracle".equals(platform)) { platform = "oracle"; } if ("mysql".equals(platform)) { platform = "mysql"; } if ("sqlserver".equals(platform)) { platform = "sqlserver"; } ResourceDatabasePopulator populator = new ResourceDatabasePopulator(); String schemaLocation = schema; schemaLocation = schemaLocation.replace("@@platform@@", platform); populator.addScript(this.resourceLoader.getResource(schemaLocation)); populator.setContinueOnError(true); logger.debug(String.format("Initializing task schema for %s database", platform)); DatabasePopulatorUtils.execute(populator, this.dataSource); } } } ================================================ FILE: spring-cloud-task-core/src/main/java/org/springframework/cloud/task/repository/support/package-info.java ================================================ /* * Copyright 2016-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Classes used for setting up and supporting a task repositories. 
*/ package org.springframework.cloud.task.repository.support; ================================================ FILE: spring-cloud-task-core/src/main/resources/META-INF/additional-spring-configuration-metadata.json ================================================ { "properties": [ { "defaultValue": false, "name": "spring.cloud.task.single-instance-enabled", "description": "This property is used to determine if a task will execute if another task with the same app name is running.", "type": "java.lang.Boolean" },{ "defaultValue": "springCloudTaskTransactionManager", "name": "spring.cloud.task.transaction-manager", "description": "This property is used to specify the transaction manager for TaskRepository. By default, a dedicated transaction manager is created by spring.", "type": "java.lang.String" } ] } ================================================ FILE: spring-cloud-task-core/src/main/resources/META-INF/spring/aot.factories ================================================ org.springframework.aot.hint.RuntimeHintsRegistrar=\ org.springframework.cloud.task.configuration.TaskRuntimeHints ================================================ FILE: spring-cloud-task-core/src/main/resources/META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports ================================================ org.springframework.cloud.task.configuration.SingleTaskConfiguration org.springframework.cloud.task.configuration.SimpleTaskAutoConfiguration org.springframework.cloud.task.configuration.observation.ObservationTaskAutoConfiguration ================================================ FILE: spring-cloud-task-core/src/main/resources/META-INF/spring.factories ================================================ org.springframework.boot.sql.init.dependency.DatabaseInitializerDetector=\ org.springframework.cloud.task.configuration.TaskRepositoryDatabaseInitializerDetector org.springframework.boot.sql.init.dependency.DependsOnDatabaseInitializationDetector=\ 
org.springframework.cloud.task.configuration.TaskRepositoryDependsOnDatabaseInitializationDetector ================================================ FILE: spring-cloud-task-core/src/main/resources/org/springframework/cloud/task/migration/1.1.x/migration-h2.sql ================================================ /* If migrating from 1.1.0.M1 to 1.1.0.RELEASE you do not need to add the ERROR_MESSAGE column. */ alter table TASK_EXECUTION add ERROR_MESSAGE VARCHAR(2500); alter table TASK_EXECUTION add EXTERNAL_EXECUTION_ID VARCHAR(255); ================================================ FILE: spring-cloud-task-core/src/main/resources/org/springframework/cloud/task/migration/1.1.x/migration-hsqldb.sql ================================================ /* If migrating from 1.1.0.M1 to 1.1.0.RELEASE you do not need to add the ERROR_MESSAGE column. */ alter table TASK_EXECUTION add ERROR_MESSAGE VARCHAR(2500); alter table TASK_EXECUTION add EXTERNAL_EXECUTION_ID VARCHAR(255); ================================================ FILE: spring-cloud-task-core/src/main/resources/org/springframework/cloud/task/migration/1.1.x/migration-mysql.sql ================================================ /* If migrating from 1.1.0.M1 to 1.1.0.RELEASE you do not need to add the ERROR_MESSAGE column. */ alter table TASK_EXECUTION add ERROR_MESSAGE VARCHAR(2500); alter table TASK_EXECUTION add EXTERNAL_EXECUTION_ID VARCHAR(255); ================================================ FILE: spring-cloud-task-core/src/main/resources/org/springframework/cloud/task/migration/1.1.x/migration-oracle.sql ================================================ /* If migrating from 1.1.0.M1 to 1.1.0.RELEASE you do not need to add the ERROR_MESSAGE column. 
*/ alter table TASK_EXECUTION add ERROR_MESSAGE varchar2(2500); alter table TASK_EXECUTION add EXTERNAL_EXECUTION_ID varchar2(255); ================================================ FILE: spring-cloud-task-core/src/main/resources/org/springframework/cloud/task/migration/1.1.x/migration-postgresql.sql ================================================ /* If migrating from 1.1.0.M1 to 1.1.0.RELEASE you do not need to add the ERROR_MESSAGE column. */ alter table TASK_EXECUTION add ERROR_MESSAGE VARCHAR(2500); alter table TASK_EXECUTION add EXTERNAL_EXECUTION_ID VARCHAR(255); ================================================ FILE: spring-cloud-task-core/src/main/resources/org/springframework/cloud/task/migration/1.1.x/migration-sqlserver.sql ================================================ /* If migrating from 1.1.0.M1 to 1.1.0.RELEASE you do not need to add the ERROR_MESSAGE column. */ alter table TASK_EXECUTION add ERROR_MESSAGE VARCHAR(2500); alter table TASK_EXECUTION add EXTERNAL_EXECUTION_ID VARCHAR(255); ================================================ FILE: spring-cloud-task-core/src/main/resources/org/springframework/cloud/task/migration/1.2.x/migration-db2.sql ================================================ alter table TASK_EXECUTION add PARENT_EXECUTION_ID BIGINT; ================================================ FILE: spring-cloud-task-core/src/main/resources/org/springframework/cloud/task/migration/1.2.x/migration-h2.sql ================================================ alter table TASK_EXECUTION add PARENT_EXECUTION_ID BIGINT; ================================================ FILE: spring-cloud-task-core/src/main/resources/org/springframework/cloud/task/migration/1.2.x/migration-hsqldb.sql ================================================ alter table TASK_EXECUTION add PARENT_EXECUTION_ID BIGINT; ================================================ FILE: spring-cloud-task-core/src/main/resources/org/springframework/cloud/task/migration/1.2.x/migration-mysql.sql 
================================================ alter table TASK_EXECUTION add PARENT_EXECUTION_ID BIGINT; ================================================ FILE: spring-cloud-task-core/src/main/resources/org/springframework/cloud/task/migration/1.2.x/migration-oracle.sql ================================================ alter table TASK_EXECUTION add PARENT_EXECUTION_ID NUMBER; ================================================ FILE: spring-cloud-task-core/src/main/resources/org/springframework/cloud/task/migration/1.2.x/migration-postgresql.sql ================================================ alter table TASK_EXECUTION add PARENT_EXECUTION_ID BIGINT; ================================================ FILE: spring-cloud-task-core/src/main/resources/org/springframework/cloud/task/migration/1.2.x/migration-sqlserver.sql ================================================ alter table TASK_EXECUTION add PARENT_EXECUTION_ID BIGINT; ================================================ FILE: spring-cloud-task-core/src/main/resources/org/springframework/cloud/task/migration/2.2.x/migration-oracle.sql ================================================ alter table TASK_LOCK MODIFY LOCK_KEY VARCHAR2(36); alter table TASK_LOCK MODIFY CLIENT_ID VARCHAR2(36); ================================================ FILE: spring-cloud-task-core/src/main/resources/org/springframework/cloud/task/migration/3.0.x/migration-db2.sql ================================================ ALTER TABLE TASK_EXECUTION ALTER COLUMN START_TIME SET DATA TYPE TIMESTAMP(9); ALTER TABLE TASK_EXECUTION ALTER COLUMN END_TIME SET DATA TYPE TIMESTAMP(9); ALTER TABLE TASK_EXECUTION ALTER COLUMN LAST_UPDATED SET DATA TYPE TIMESTAMP(9); ALTER TABLE TASK_LOCK ALTER COLUMN CREATED_DATE SET DATA TYPE TIMESTAMP(9); ================================================ FILE: spring-cloud-task-core/src/main/resources/org/springframework/cloud/task/migration/3.0.x/migration-h2.sql ================================================ ALTER TABLE 
TASK_EXECUTION ALTER COLUMN START_TIME SET DATA TYPE TIMESTAMP(9);
ALTER TABLE TASK_EXECUTION ALTER COLUMN END_TIME SET DATA TYPE TIMESTAMP(9);
ALTER TABLE TASK_EXECUTION ALTER COLUMN LAST_UPDATED SET DATA TYPE TIMESTAMP(9);
ALTER TABLE TASK_LOCK ALTER COLUMN CREATED_DATE SET DATA TYPE TIMESTAMP(9);
================================================
FILE: spring-cloud-task-core/src/main/resources/org/springframework/cloud/task/migration/3.0.x/migration-hsqldb.sql
================================================
/* 3.0.x: raise the precision of all timestamp columns to nanoseconds (TIMESTAMP(9)). */
ALTER TABLE TASK_EXECUTION ALTER COLUMN START_TIME SET DATA TYPE TIMESTAMP(9);
ALTER TABLE TASK_EXECUTION ALTER COLUMN END_TIME SET DATA TYPE TIMESTAMP(9);
ALTER TABLE TASK_EXECUTION ALTER COLUMN LAST_UPDATED SET DATA TYPE TIMESTAMP(9);
ALTER TABLE TASK_LOCK ALTER COLUMN CREATED_DATE SET DATA TYPE TIMESTAMP(9);
================================================
FILE: spring-cloud-task-core/src/main/resources/org/springframework/cloud/task/migration/3.0.x/migration-mysql.sql
================================================
/* 3.0.x: move to microsecond-precision DATETIME(6); the columns remain nullable. */
ALTER TABLE TASK_EXECUTION MODIFY COLUMN END_TIME DATETIME(6) NULL;
ALTER TABLE TASK_EXECUTION MODIFY COLUMN START_TIME DATETIME(6) NULL;
ALTER TABLE TASK_EXECUTION MODIFY COLUMN LAST_UPDATED DATETIME(6) NULL;
ALTER TABLE TASK_LOCK MODIFY COLUMN CREATED_DATE DATETIME(6) NULL;
================================================
FILE: spring-cloud-task-core/src/main/resources/org/springframework/cloud/task/migration/3.0.x/migration-oracle.sql
================================================
/* 3.0.x: raise timestamp precision to nanoseconds and make TASK_SEQ an ORDER sequence.
   Fixes relative to the previous revision:
   - Oracle's syntax is "ALTER TABLE ... MODIFY <col> <type>" with NO COLUMN keyword
     (consistent with the 2.2.x Oracle migration); "MODIFY COLUMN" fails with ORA-00905.
   - TASK_EXECUTION has no CREATE_TIME column (see schema-oracle.sql); END_TIME is the
     column every sibling 3.0.x migration (DB2/H2/HSQLDB/MySQL) converts, so it is
     migrated here instead of the non-existent CREATE_TIME. */
ALTER TABLE TASK_EXECUTION MODIFY START_TIME TIMESTAMP(9);
ALTER TABLE TASK_EXECUTION MODIFY END_TIME TIMESTAMP(9);
ALTER TABLE TASK_EXECUTION MODIFY LAST_UPDATED TIMESTAMP(9);
ALTER TABLE TASK_LOCK MODIFY CREATED_DATE TIMESTAMP(9);
ALTER SEQUENCE TASK_SEQ ORDER;
================================================
FILE: spring-cloud-task-core/src/main/resources/org/springframework/cloud/task/schema-db2.sql
================================================
/* Task repository schema (DB2 dialect).
   TASK_EXECUTION        one row per task run (timings, exit status, messages, parent link)
   TASK_EXECUTION_PARAMS the arguments a task execution was launched with (FK to the run)
   TASK_TASK_BATCH       links a task execution to Spring Batch job executions by id
   TASK_SEQ              supplies TASK_EXECUTION_ID values
   TASK_LOCK             lock rows; presumably backs the single-instance task lock -- see
                         the SingleInstanceTaskListener tests elsewhere in this module. */
CREATE TABLE TASK_EXECUTION ( TASK_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY , START_TIME TIMESTAMP(9) DEFAULT NULL , END_TIME TIMESTAMP(9) DEFAULT NULL , TASK_NAME VARCHAR(100) , EXIT_CODE INTEGER , EXIT_MESSAGE VARCHAR(2500) , ERROR_MESSAGE VARCHAR(2500) , LAST_UPDATED TIMESTAMP(9), EXTERNAL_EXECUTION_ID VARCHAR(255), PARENT_EXECUTION_ID BIGINT );
CREATE TABLE TASK_EXECUTION_PARAMS ( TASK_EXECUTION_ID BIGINT NOT NULL , TASK_PARAM VARCHAR(2500) , constraint TASK_EXEC_PARAMS_FK foreign key (TASK_EXECUTION_ID) references TASK_EXECUTION(TASK_EXECUTION_ID) ) ;
CREATE TABLE TASK_TASK_BATCH ( TASK_EXECUTION_ID BIGINT NOT NULL , JOB_EXECUTION_ID BIGINT NOT NULL , constraint TASK_EXEC_BATCH_FK foreign key (TASK_EXECUTION_ID) references TASK_EXECUTION(TASK_EXECUTION_ID) ) ;
CREATE SEQUENCE TASK_SEQ AS BIGINT START WITH 0 MINVALUE 0 MAXVALUE 9223372036854775807 NOCACHE NOCYCLE;
CREATE TABLE TASK_LOCK ( LOCK_KEY CHAR(36) NOT NULL, REGION VARCHAR(100) NOT NULL, CLIENT_ID CHAR(36), CREATED_DATE TIMESTAMP(9) NOT NULL, EXPIRED_AFTER TIMESTAMP(9) NOT NULL, constraint LOCK_PK primary key (LOCK_KEY, REGION) );
================================================
FILE: spring-cloud-task-core/src/main/resources/org/springframework/cloud/task/schema-h2.sql
================================================
/* H2 dialect: same layout as DB2; TASK_SEQ is a plain sequence with H2 defaults. */
CREATE TABLE TASK_EXECUTION ( TASK_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY , START_TIME TIMESTAMP(9) DEFAULT NULL , END_TIME TIMESTAMP(9) DEFAULT NULL , TASK_NAME VARCHAR(100) , EXIT_CODE INTEGER , EXIT_MESSAGE VARCHAR(2500) , ERROR_MESSAGE VARCHAR(2500) , LAST_UPDATED TIMESTAMP(9), EXTERNAL_EXECUTION_ID VARCHAR(255), PARENT_EXECUTION_ID BIGINT );
CREATE TABLE TASK_EXECUTION_PARAMS ( TASK_EXECUTION_ID BIGINT NOT NULL , TASK_PARAM VARCHAR(2500) , constraint TASK_EXEC_PARAMS_FK foreign key (TASK_EXECUTION_ID) references TASK_EXECUTION(TASK_EXECUTION_ID) ) ;
CREATE TABLE TASK_TASK_BATCH ( TASK_EXECUTION_ID BIGINT NOT NULL , JOB_EXECUTION_ID BIGINT NOT NULL , constraint TASK_EXEC_BATCH_FK foreign key (TASK_EXECUTION_ID) references TASK_EXECUTION(TASK_EXECUTION_ID) ) ;
CREATE SEQUENCE TASK_SEQ ;
CREATE TABLE TASK_LOCK ( LOCK_KEY CHAR(36) NOT NULL, REGION VARCHAR(100) NOT NULL, CLIENT_ID CHAR(36), CREATED_DATE TIMESTAMP(9) NOT NULL, EXPIRED_AFTER TIMESTAMP(9) NOT NULL, constraint LOCK_PK primary key (LOCK_KEY, REGION) );
================================================
FILE: spring-cloud-task-core/src/main/resources/org/springframework/cloud/task/schema-hsqldb.sql
================================================
/* HSQLDB dialect: TASK_SEQ is emulated with an IDENTITY table instead of a sequence. */
CREATE TABLE TASK_EXECUTION ( TASK_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY , START_TIME TIMESTAMP(9) DEFAULT NULL , END_TIME TIMESTAMP(9) DEFAULT NULL , TASK_NAME VARCHAR(100) , EXIT_CODE INTEGER , EXIT_MESSAGE VARCHAR(2500) , ERROR_MESSAGE VARCHAR(2500) , LAST_UPDATED TIMESTAMP(9), EXTERNAL_EXECUTION_ID VARCHAR(255), PARENT_EXECUTION_ID BIGINT );
CREATE TABLE TASK_EXECUTION_PARAMS ( TASK_EXECUTION_ID BIGINT NOT NULL , TASK_PARAM VARCHAR(2500) , constraint TASK_EXEC_PARAMS_FK foreign key (TASK_EXECUTION_ID) references TASK_EXECUTION(TASK_EXECUTION_ID) ) ;
CREATE TABLE TASK_TASK_BATCH ( TASK_EXECUTION_ID BIGINT NOT NULL , JOB_EXECUTION_ID BIGINT NOT NULL , constraint TASK_EXEC_BATCH_FK foreign key (TASK_EXECUTION_ID) references TASK_EXECUTION(TASK_EXECUTION_ID) ) ;
CREATE TABLE TASK_SEQ ( ID BIGINT IDENTITY );
CREATE TABLE TASK_LOCK ( LOCK_KEY CHAR(36) NOT NULL, REGION VARCHAR(100) NOT NULL, CLIENT_ID CHAR(36), CREATED_DATE TIMESTAMP(9) NOT NULL, EXPIRED_AFTER TIMESTAMP(9) NOT NULL, constraint LOCK_PK primary key (LOCK_KEY, REGION) );
================================================
FILE: spring-cloud-task-core/src/main/resources/org/springframework/cloud/task/schema-mariadb.sql
================================================
/* MariaDB dialect: DATETIME(6) timestamps, InnoDB tables, and a native sequence. */
CREATE TABLE TASK_EXECUTION ( TASK_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY , START_TIME DATETIME(6) DEFAULT NULL , END_TIME DATETIME(6) DEFAULT NULL , TASK_NAME VARCHAR(100) , EXIT_CODE INTEGER , EXIT_MESSAGE VARCHAR(2500) , ERROR_MESSAGE VARCHAR(2500) , LAST_UPDATED TIMESTAMP, EXTERNAL_EXECUTION_ID VARCHAR(255), PARENT_EXECUTION_ID BIGINT ) ENGINE=InnoDB;
CREATE TABLE TASK_EXECUTION_PARAMS ( TASK_EXECUTION_ID BIGINT NOT NULL , TASK_PARAM VARCHAR(2500) , constraint TASK_EXEC_PARAMS_FK foreign key (TASK_EXECUTION_ID) references TASK_EXECUTION(TASK_EXECUTION_ID) ) ENGINE=InnoDB;
CREATE TABLE TASK_TASK_BATCH ( TASK_EXECUTION_ID BIGINT NOT NULL , JOB_EXECUTION_ID BIGINT NOT NULL , constraint TASK_EXEC_BATCH_FK foreign key (TASK_EXECUTION_ID) references TASK_EXECUTION(TASK_EXECUTION_ID) ) ENGINE=InnoDB;
CREATE TABLE TASK_LOCK ( LOCK_KEY CHAR(36) NOT NULL, REGION VARCHAR(100) NOT NULL, CLIENT_ID CHAR(36), CREATED_DATE DATETIME(6) NOT NULL, EXPIRED_AFTER DATETIME(6) NOT NULL, constraint LOCK_PK primary key (LOCK_KEY, REGION) ) ENGINE=InnoDB;
CREATE SEQUENCE TASK_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775806 INCREMENT BY 1 NOCACHE NOCYCLE ENGINE=InnoDB;
================================================
FILE: spring-cloud-task-core/src/main/resources/org/springframework/cloud/task/schema-mysql.sql
================================================
/* MySQL dialect: no native sequences, so TASK_SEQ is a single-row table seeded with 0;
   the UNIQUE_KEY constraint guarantees that it can never hold more than one row. */
CREATE TABLE TASK_EXECUTION ( TASK_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY , START_TIME DATETIME(6) DEFAULT NULL , END_TIME DATETIME(6) DEFAULT NULL , TASK_NAME VARCHAR(100) , EXIT_CODE INTEGER , EXIT_MESSAGE VARCHAR(2500) , ERROR_MESSAGE VARCHAR(2500) , LAST_UPDATED TIMESTAMP, EXTERNAL_EXECUTION_ID VARCHAR(255), PARENT_EXECUTION_ID BIGINT ) ENGINE=InnoDB;
CREATE TABLE TASK_EXECUTION_PARAMS ( TASK_EXECUTION_ID BIGINT NOT NULL , TASK_PARAM VARCHAR(2500) , constraint TASK_EXEC_PARAMS_FK foreign key (TASK_EXECUTION_ID) references TASK_EXECUTION(TASK_EXECUTION_ID) ) ENGINE=InnoDB;
CREATE TABLE TASK_TASK_BATCH ( TASK_EXECUTION_ID BIGINT NOT NULL , JOB_EXECUTION_ID BIGINT NOT NULL , constraint TASK_EXEC_BATCH_FK foreign key (TASK_EXECUTION_ID) references TASK_EXECUTION(TASK_EXECUTION_ID) ) ENGINE=InnoDB;
CREATE TABLE TASK_SEQ ( ID BIGINT NOT NULL, UNIQUE_KEY CHAR(1) NOT NULL, constraint UNIQUE_KEY_UN unique (UNIQUE_KEY) ) ENGINE=InnoDB;
INSERT INTO TASK_SEQ (ID, UNIQUE_KEY) select * from (select 0 as ID, '0' as UNIQUE_KEY) as tmp;
CREATE TABLE TASK_LOCK ( LOCK_KEY CHAR(36) NOT NULL, REGION VARCHAR(100) NOT NULL, CLIENT_ID CHAR(36), CREATED_DATE DATETIME(6) NOT NULL, EXPIRED_AFTER DATETIME(6) NOT NULL, constraint LOCK_PK primary key (LOCK_KEY, REGION) ) ENGINE=InnoDB;
================================================
FILE: spring-cloud-task-core/src/main/resources/org/springframework/cloud/task/schema-oracle.sql
================================================
/* Oracle dialect: NUMBER/VARCHAR2 types; TASK_SEQ is an ORDER sequence so ids are
   handed out in request order across RAC instances. */
CREATE TABLE TASK_EXECUTION ( TASK_EXECUTION_ID NUMBER NOT NULL PRIMARY KEY , START_TIME TIMESTAMP(9) DEFAULT NULL , END_TIME TIMESTAMP(9) DEFAULT NULL , TASK_NAME VARCHAR2(100) , EXIT_CODE INTEGER , EXIT_MESSAGE VARCHAR2(2500) , ERROR_MESSAGE VARCHAR2(2500) , LAST_UPDATED TIMESTAMP(9), EXTERNAL_EXECUTION_ID VARCHAR2(255), PARENT_EXECUTION_ID NUMBER );
CREATE TABLE TASK_EXECUTION_PARAMS ( TASK_EXECUTION_ID NUMBER NOT NULL , TASK_PARAM VARCHAR2(2500) , constraint TASK_EXEC_PARAMS_FK foreign key (TASK_EXECUTION_ID) references TASK_EXECUTION(TASK_EXECUTION_ID) ) ;
CREATE TABLE TASK_TASK_BATCH ( TASK_EXECUTION_ID NUMBER NOT NULL , JOB_EXECUTION_ID NUMBER NOT NULL , constraint TASK_EXEC_BATCH_FK foreign key (TASK_EXECUTION_ID) references TASK_EXECUTION(TASK_EXECUTION_ID) ) ;
CREATE SEQUENCE TASK_SEQ START WITH 0 MINVALUE 0 MAXVALUE 9223372036854775807 ORDER NOCYCLE;
CREATE TABLE TASK_LOCK ( LOCK_KEY VARCHAR2(36) NOT NULL, REGION VARCHAR2(100) NOT NULL, CLIENT_ID VARCHAR2(36), CREATED_DATE TIMESTAMP(9) NOT NULL, EXPIRED_AFTER TIMESTAMP(9) NOT NULL, constraint LOCK_PK primary key (LOCK_KEY, REGION) );
================================================
FILE: spring-cloud-task-core/src/main/resources/org/springframework/cloud/task/schema-postgresql.sql
================================================
/* PostgreSQL dialect: default-precision TIMESTAMP columns and a plain sequence. */
CREATE TABLE TASK_EXECUTION ( TASK_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY , START_TIME TIMESTAMP DEFAULT NULL , END_TIME TIMESTAMP DEFAULT NULL , TASK_NAME VARCHAR(100) , EXIT_CODE INTEGER , EXIT_MESSAGE VARCHAR(2500) , ERROR_MESSAGE VARCHAR(2500) , LAST_UPDATED TIMESTAMP , EXTERNAL_EXECUTION_ID VARCHAR(255), PARENT_EXECUTION_ID BIGINT );
CREATE TABLE TASK_EXECUTION_PARAMS ( TASK_EXECUTION_ID BIGINT NOT NULL , TASK_PARAM VARCHAR(2500) , constraint TASK_EXEC_PARAMS_FK foreign key (TASK_EXECUTION_ID) references TASK_EXECUTION(TASK_EXECUTION_ID) ) ;
CREATE TABLE TASK_TASK_BATCH ( TASK_EXECUTION_ID BIGINT NOT NULL , JOB_EXECUTION_ID BIGINT NOT NULL , constraint TASK_EXEC_BATCH_FK foreign key (TASK_EXECUTION_ID) references TASK_EXECUTION(TASK_EXECUTION_ID) ) ;
CREATE SEQUENCE TASK_SEQ MAXVALUE 9223372036854775807 NO CYCLE;
CREATE TABLE TASK_LOCK ( LOCK_KEY CHAR(36) NOT NULL, REGION VARCHAR(100) NOT NULL, CLIENT_ID CHAR(36), CREATED_DATE TIMESTAMP NOT NULL, EXPIRED_AFTER TIMESTAMP NOT NULL, constraint LOCK_PK primary key (LOCK_KEY, REGION) );
================================================
FILE: spring-cloud-task-core/src/main/resources/org/springframework/cloud/task/schema-sqlserver.sql
================================================
/* SQL Server dialect: DATETIME columns and a sequence starting at 0. */
CREATE TABLE TASK_EXECUTION ( TASK_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY , START_TIME DATETIME DEFAULT NULL , END_TIME DATETIME DEFAULT NULL , TASK_NAME VARCHAR(100) , EXIT_CODE INTEGER , EXIT_MESSAGE VARCHAR(2500) , ERROR_MESSAGE VARCHAR(2500) , LAST_UPDATED DATETIME , EXTERNAL_EXECUTION_ID VARCHAR(255), PARENT_EXECUTION_ID BIGINT );
CREATE TABLE TASK_EXECUTION_PARAMS ( TASK_EXECUTION_ID BIGINT NOT NULL , TASK_PARAM VARCHAR(2500) , constraint TASK_EXEC_PARAMS_FK foreign key (TASK_EXECUTION_ID) references TASK_EXECUTION(TASK_EXECUTION_ID) ) ;
CREATE TABLE TASK_TASK_BATCH ( TASK_EXECUTION_ID BIGINT NOT NULL , JOB_EXECUTION_ID BIGINT NOT NULL , constraint TASK_EXEC_BATCH_FK foreign key (TASK_EXECUTION_ID) references TASK_EXECUTION(TASK_EXECUTION_ID) ) ;
CREATE SEQUENCE TASK_SEQ START WITH 0 MINVALUE 0 MAXVALUE 9223372036854775807 NO CACHE NO CYCLE;
CREATE TABLE TASK_LOCK ( LOCK_KEY CHAR(36) NOT NULL, REGION VARCHAR(100) NOT NULL, CLIENT_ID CHAR(36), CREATED_DATE DATETIME NOT NULL, EXPIRED_AFTER DATETIME NOT NULL, constraint LOCK_PK primary key (LOCK_KEY, REGION) );
================================================
FILE: spring-cloud-task-core/src/test/java/org/springframework/cloud/task/SimpleSingleTaskAutoConfigurationTests.java
================================================
/*
 * Copyright 2017-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.cloud.task;

import org.junit.jupiter.api.Test;

import org.springframework.boot.autoconfigure.AutoConfigurations;
import org.springframework.boot.autoconfigure.context.PropertyPlaceholderAutoConfiguration;
import org.springframework.boot.test.context.runner.ApplicationContextRunner;
import org.springframework.cloud.task.configuration.SimpleTaskAutoConfiguration;
import org.springframework.cloud.task.configuration.SingleInstanceTaskListener;
import org.springframework.cloud.task.configuration.SingleTaskConfiguration;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Verifies the beans created by the single-task auto-configuration;
 * specifically that PassThruRegistry was selected.
*
 * @author Glenn Renfro
 * @since 2.0.0
 */
public class SimpleSingleTaskAutoConfigurationTests {

	/**
	 * With {@code singleInstanceEnabled=true} and no DataSource configured, the
	 * auto-configuration must still contribute a {@code SingleInstanceTaskListener}.
	 */
	@Test
	public void testConfiguration() {
		ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner()
			.withConfiguration(AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class,
					SimpleTaskAutoConfiguration.class, SingleTaskConfiguration.class))
			.withPropertyValues("spring.cloud.task.singleInstanceEnabled=true");
		applicationContextRunner.run((context) -> {
			SingleInstanceTaskListener singleInstanceTaskListener = context.getBean(SingleInstanceTaskListener.class);
			assertThat(singleInstanceTaskListener).as("singleInstanceTaskListener should not be null").isNotNull();
			// AssertJ convention is assertThat(actual).isEqualTo(expected); the previous
			// revision had the operands transposed, producing a misleading failure message.
			assertThat(singleInstanceTaskListener.getClass()).isEqualTo(SingleInstanceTaskListener.class);
		});
	}

}
================================================
FILE: spring-cloud-task-core/src/test/java/org/springframework/cloud/task/SimpleSingleTaskAutoConfigurationWithDataSourceTests.java
================================================
/*
 * Copyright 2017-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*/ package org.springframework.cloud.task; import org.junit.jupiter.api.Test; import org.springframework.boot.autoconfigure.AutoConfigurations; import org.springframework.boot.autoconfigure.context.PropertyPlaceholderAutoConfiguration; import org.springframework.boot.jdbc.autoconfigure.EmbeddedDataSourceConfiguration; import org.springframework.boot.test.context.runner.ApplicationContextRunner; import org.springframework.cloud.task.configuration.SimpleTaskAutoConfiguration; import org.springframework.cloud.task.configuration.SingleInstanceTaskListener; import org.springframework.cloud.task.configuration.SingleTaskConfiguration; import static org.assertj.core.api.Assertions.assertThat; /** * Verifies that the beans created by the SimpleSingleTaskAutoConfigurationConfiguration * specifically that the JdbcLockRegistry was selected. * * @author Glenn Renfro * @since 2.0.0 */ public class SimpleSingleTaskAutoConfigurationWithDataSourceTests { @Test public void testConfiguration() { ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() .withConfiguration( AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class, SimpleTaskAutoConfiguration.class, SingleTaskConfiguration.class, EmbeddedDataSourceConfiguration.class)) .withPropertyValues("spring.cloud.task.singleInstanceEnabled=true"); applicationContextRunner.run((context) -> { SingleInstanceTaskListener singleInstanceTaskListener = context.getBean(SingleInstanceTaskListener.class); assertThat(singleInstanceTaskListener).as("singleInstanceTaskListener should not be null").isNotNull(); assertThat(SingleInstanceTaskListener.class).isEqualTo(singleInstanceTaskListener.getClass()); }); } } ================================================ FILE: spring-cloud-task-core/src/test/java/org/springframework/cloud/task/SimpleTaskAutoConfigurationTests.java ================================================ /* * Copyright 2015-present the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task; import javax.sql.DataSource; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.function.Executable; import org.springframework.aop.framework.AopProxyUtils; import org.springframework.aop.scope.ScopedProxyUtils; import org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager; import org.springframework.beans.factory.BeanCreationException; import org.springframework.beans.factory.NoSuchBeanDefinitionException; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.config.BeanDefinitionHolder; import org.springframework.beans.factory.config.ConfigurableListableBeanFactory; import org.springframework.beans.factory.support.BeanDefinitionRegistry; import org.springframework.beans.factory.support.GenericBeanDefinition; import org.springframework.boot.autoconfigure.AutoConfigurations; import org.springframework.boot.autoconfigure.context.PropertyPlaceholderAutoConfiguration; import org.springframework.boot.jdbc.autoconfigure.EmbeddedDataSourceConfiguration; import org.springframework.boot.test.context.runner.ApplicationContextRunner; import org.springframework.cloud.task.configuration.DefaultTaskConfigurer; import org.springframework.cloud.task.configuration.EnableTask; import org.springframework.cloud.task.configuration.SimpleTaskAutoConfiguration; import 
org.springframework.cloud.task.configuration.SingleTaskConfiguration; import org.springframework.cloud.task.configuration.TaskConfigurer; import org.springframework.cloud.task.repository.TaskExplorer; import org.springframework.cloud.task.repository.TaskNameResolver; import org.springframework.cloud.task.repository.TaskRepository; import org.springframework.cloud.task.repository.support.SimpleTaskRepository; import org.springframework.context.ApplicationContextException; import org.springframework.context.ConfigurableApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.transaction.PlatformTransactionManager; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatExceptionOfType; import static org.mockito.Mockito.mock; /** * Verifies that the beans created by the SimpleTaskAutoConfiguration. * * @author Glenn Renfro * @author Michael Minella */ public class SimpleTaskAutoConfigurationTests { @Test public void testRepository() { ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() .withConfiguration(AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class, SimpleTaskAutoConfiguration.class, SingleTaskConfiguration.class)); applicationContextRunner.run((context) -> { TaskRepository taskRepository = context.getBean(TaskRepository.class); assertThat(taskRepository).isNotNull(); Class targetClass = AopProxyUtils.ultimateTargetClass(taskRepository); assertThat(targetClass).isEqualTo(SimpleTaskRepository.class); }); } @Test public void testAutoConfigurationDisabled() { ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() .withConfiguration(AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class, SimpleTaskAutoConfiguration.class, SingleTaskConfiguration.class)) .withPropertyValues("spring.cloud.task.autoconfiguration.enabled=false"); 
Executable executable = () -> { applicationContextRunner.run((context) -> { context.getBean(TaskRepository.class); }); }; verifyExceptionThrown( NoSuchBeanDefinitionException.class, "No qualifying " + "bean of type 'org.springframework.cloud.task.repository.TaskRepository' " + "available", executable); } @Test public void testRepositoryInitialized() { ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() .withConfiguration(AutoConfigurations.of(EmbeddedDataSourceConfiguration.class, PropertyPlaceholderAutoConfiguration.class, SimpleTaskAutoConfiguration.class, SingleTaskConfiguration.class)) .withUserConfiguration(TaskLifecycleListenerConfiguration.class); applicationContextRunner.run((context) -> { TaskExplorer taskExplorer = context.getBean(TaskExplorer.class); assertThat(taskExplorer.getTaskExecutionCount()).isEqualTo(1L); }); } @Test public void testRepositoryBeansDependOnTaskRepositoryInitializer() { ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() .withConfiguration(AutoConfigurations.of(EmbeddedDataSourceConfiguration.class, PropertyPlaceholderAutoConfiguration.class, SimpleTaskAutoConfiguration.class, SingleTaskConfiguration.class)) .withUserConfiguration(TaskLifecycleListenerConfiguration.class); applicationContextRunner.run((context) -> { ConfigurableListableBeanFactory beanFactory = context.getBeanFactory(); String[] taskRepositoryNames = beanFactory.getBeanNamesForType(TaskRepository.class); assertThat(taskRepositoryNames).isNotEmpty(); for (String taskRepositoryName : taskRepositoryNames) { assertThat(beanFactory.getBeanDefinition(taskRepositoryName).getDependsOn()) .contains("taskRepositoryInitializer"); } }); } @Test public void testRepositoryNotInitialized() { ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() .withConfiguration(AutoConfigurations.of(EmbeddedDataSourceConfiguration.class, PropertyPlaceholderAutoConfiguration.class, 
SimpleTaskAutoConfiguration.class, SingleTaskConfiguration.class)) .withUserConfiguration(TaskLifecycleListenerConfiguration.class) .withPropertyValues("spring.cloud.task.tablePrefix=foobarless"); verifyExceptionThrownDefaultExecutable(ApplicationContextException.class, applicationContextRunner); } @Test public void testTaskNameResolver() { ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() .withConfiguration(AutoConfigurations.of(EmbeddedDataSourceConfiguration.class, PropertyPlaceholderAutoConfiguration.class, SimpleTaskAutoConfiguration.class, SingleTaskConfiguration.class)) .withUserConfiguration(TaskLifecycleListenerConfiguration.class) .withPropertyValues("spring.cloud.task.name=myTestName"); applicationContextRunner.run((context) -> { TaskNameResolver taskNameResolver = context.getBean(TaskNameResolver.class); assertThat(taskNameResolver.getTaskName()).isEqualTo("myTestName"); }); } @Test public void testMultipleConfigurers() { ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() .withConfiguration(AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class, SimpleTaskAutoConfiguration.class, SingleTaskConfiguration.class)) .withUserConfiguration(MultipleConfigurers.class); verifyExceptionThrownDefaultExecutable(BeanCreationException.class, "Error creating bean " + "with name 'org.springframework.cloud.task.configuration.SimpleTaskAutoConfiguration': Invocation of init method failed", applicationContextRunner); } @Test public void testMultipleDataSources() { ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() .withConfiguration(AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class, SimpleTaskAutoConfiguration.class, SingleTaskConfiguration.class)) .withUserConfiguration(MultipleDataSources.class); verifyExceptionThrownDefaultExecutable(BeanCreationException.class, "Error creating bean " + "with name 
'org.springframework.cloud.task.configuration.SimpleTaskAutoConfiguration': Invocation of init method failed", applicationContextRunner); } @Test void testSpecifyTransactionManager() { ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() .withConfiguration(AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class, SimpleTaskAutoConfiguration.class, SingleTaskConfiguration.class)) .withBean("transactionManager", ResourcelessTransactionManager.class) .withPropertyValues("spring.cloud.task.transaction-manager=transactionManager"); applicationContextRunner.run((context) -> { assertThat(context.getBeanNamesForType(PlatformTransactionManager.class)).hasSize(1) .contains("transactionManager") .doesNotContain("springCloudTaskTransactionManager"); }); } @Test void testDefaultTransactionManager() { ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() .withConfiguration(AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class, SimpleTaskAutoConfiguration.class, SingleTaskConfiguration.class)); applicationContextRunner.run((context) -> { assertThat(context.getBeanNamesForType(PlatformTransactionManager.class)).hasSize(1) .contains("springCloudTaskTransactionManager") .doesNotContain("transactionManager"); }); } public void verifyExceptionThrownDefaultExecutable(Class classToCheck, ApplicationContextRunner applicationContextRunner) { Executable executable = () -> { applicationContextRunner.run((context) -> { Throwable expectedException = context.getStartupFailure(); assertThat(expectedException).isNotNull(); throw expectedException; }); }; assertThatExceptionOfType(classToCheck).isThrownBy(executable::execute); } public void verifyExceptionThrownDefaultExecutable(Class classToCheck, String message, ApplicationContextRunner applicationContextRunner) { Executable executable = () -> { applicationContextRunner.run((context) -> { Throwable expectedException = context.getStartupFailure(); 
assertThat(expectedException).isNotNull(); throw expectedException; }); }; verifyExceptionThrown(classToCheck, message, executable); } public void verifyExceptionThrown(Class classToCheck, String message, Executable executable) { assertThatExceptionOfType(classToCheck).isThrownBy(executable::execute).withMessage(message); } /** * Verify that the verifyEnvironment method skips DataSource Proxy Beans when * determining the number of available dataSources. */ @Test public void testWithDataSourceProxy() { ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() .withConfiguration(AutoConfigurations.of(EmbeddedDataSourceConfiguration.class, PropertyPlaceholderAutoConfiguration.class, SimpleTaskAutoConfiguration.class, SingleTaskConfiguration.class)) .withUserConfiguration(DataSourceProxyConfiguration.class); applicationContextRunner.run((context) -> { assertThat(context.getBeanNamesForType(DataSource.class).length).isEqualTo(2); SimpleTaskAutoConfiguration taskConfiguration = context.getBean(SimpleTaskAutoConfiguration.class); assertThat(taskConfiguration).isNotNull(); assertThat(taskConfiguration.taskExplorer()).isNotNull(); }); } @Configuration public static class MultipleConfigurers { @Bean public TaskConfigurer taskConfigurer1() { return new DefaultTaskConfigurer((DataSource) null); } @Bean public TaskConfigurer taskConfigurer2() { return new DefaultTaskConfigurer((DataSource) null); } } @Configuration public static class MultipleDataSources { @Bean public DataSource dataSource() { return mock(DataSource.class); } @Bean public DataSource dataSource2() { return mock(DataSource.class); } } @Configuration public static class DataSourceProxyConfiguration { @Autowired private ConfigurableApplicationContext context; @Bean public BeanDefinitionHolder proxyDataSource() { GenericBeanDefinition proxyBeanDefinition = new GenericBeanDefinition(); proxyBeanDefinition.setBeanClassName("javax.sql.DataSource"); BeanDefinitionHolder myDataSource = new 
BeanDefinitionHolder(proxyBeanDefinition, "dataSource2"); ScopedProxyUtils.createScopedProxy(myDataSource, (BeanDefinitionRegistry) this.context.getBeanFactory(), true); return myDataSource; } } @EnableTask @Configuration public static class TaskLifecycleListenerConfiguration { } } ================================================ FILE: spring-cloud-task-core/src/test/java/org/springframework/cloud/task/TaskCoreTests.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.springframework.cloud.task; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.boot.CommandLineRunner; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.ImportAutoConfiguration; import org.springframework.boot.autoconfigure.context.PropertyPlaceholderAutoConfiguration; import org.springframework.boot.test.system.CapturedOutput; import org.springframework.boot.test.system.OutputCaptureExtension; import org.springframework.cloud.task.configuration.EnableTask; import org.springframework.cloud.task.configuration.SimpleTaskAutoConfiguration; import org.springframework.context.ApplicationContextException; import org.springframework.context.ConfigurableApplicationContext; import org.springframework.context.annotation.Bean; import static org.assertj.core.api.Assertions.assertThat; /** * Verifies core behavior for Tasks. * * @author Glenn Renfro */ @ExtendWith(OutputCaptureExtension.class) public class TaskCoreTests { private static final String TASK_NAME = "taskEventTest"; private static final String EXCEPTION_MESSAGE = "FOO EXCEPTION"; private static final String CREATE_TASK_MESSAGE = "Creating: TaskExecution{executionId="; private static final String UPDATE_TASK_MESSAGE = "Updating: TaskExecution with executionId="; private static final String SUCCESS_EXIT_CODE_MESSAGE = "with the following {exitCode=0"; private static final String EXCEPTION_EXIT_CODE_MESSAGE = "with the following {exitCode=1"; private static final String EXCEPTION_INVALID_TASK_EXECUTION_ID = "java.lang.IllegalArgumentException: " + "Invalid TaskExecution, ID 55 not found"; private static final String ERROR_MESSAGE = "errorMessage='java.lang.IllegalStateException: " + "FOO EXCEPTION"; private ConfigurableApplicationContext applicationContext; @AfterEach public void teardown() { if (this.applicationContext != null && 
this.applicationContext.isActive()) { this.applicationContext.close(); } } @Test public void successfulTaskTest(CapturedOutput capturedOutput) { this.applicationContext = SpringApplication.run(TaskConfiguration.class, "--spring.cloud.task.closecontext.enable=false", "--spring.cloud.task.name=" + TASK_NAME, "--spring.main.web-environment=false"); String output = capturedOutput.toString(); assertThat(output.contains(CREATE_TASK_MESSAGE)).as("Test results do not show create task message: " + output) .isTrue(); assertThat(output.contains(UPDATE_TASK_MESSAGE)).as("Test results do not show success message: " + output) .isTrue(); assertThat(output.contains(SUCCESS_EXIT_CODE_MESSAGE)).as("Test results have incorrect exit code: " + output) .isTrue(); } /** * Test to verify that deprecated annotation does not affect task execution. */ @Test public void successfulTaskTestWithAnnotation(CapturedOutput capturedOutput) { this.applicationContext = SpringApplication.run(TaskConfigurationWithAnotation.class, "--spring.cloud.task.closecontext.enable=false", "--spring.cloud.task.name=" + TASK_NAME, "--spring.main.web-environment=false"); String output = capturedOutput.toString(); assertThat(output.contains(CREATE_TASK_MESSAGE)).as("Test results do not show create task message: " + output) .isTrue(); assertThat(output.contains(UPDATE_TASK_MESSAGE)).as("Test results do not show success message: " + output) .isTrue(); assertThat(output.contains(SUCCESS_EXIT_CODE_MESSAGE)).as("Test results have incorrect exit code: " + output) .isTrue(); } @Test public void exceptionTaskTest(CapturedOutput capturedOutput) { boolean exceptionFired = false; try { this.applicationContext = SpringApplication.run(TaskExceptionConfiguration.class, "--spring.cloud.task.closecontext.enable=false", "--spring.cloud.task.name=" + TASK_NAME, "--spring.main.web-environment=false"); } catch (IllegalStateException exception) { exceptionFired = true; } assertThat(exceptionFired).as("An IllegalStateException should have 
been thrown").isTrue(); String output = capturedOutput.toString(); assertThat(output.contains(CREATE_TASK_MESSAGE)).as("Test results do not show create task message: " + output) .isTrue(); assertThat(output.contains(UPDATE_TASK_MESSAGE)).as("Test results do not show success message: " + output) .isTrue(); assertThat(output.contains(EXCEPTION_EXIT_CODE_MESSAGE)).as("Test results have incorrect exit code: " + output) .isTrue(); assertThat(output.contains(ERROR_MESSAGE)).as("Test results have incorrect exit message: " + output).isTrue(); assertThat(output.contains(EXCEPTION_MESSAGE)).as("Test results have exception message: " + output).isTrue(); } @Test public void invalidExecutionId(CapturedOutput capturedOutput) { boolean exceptionFired = false; try { this.applicationContext = SpringApplication.run(TaskExceptionConfiguration.class, "--spring.cloud.task.closecontext.enable=false", "--spring.cloud.task.name=" + TASK_NAME, "--spring.main.web-environment=false", "--spring.cloud.task.executionid=55"); } catch (ApplicationContextException exception) { exceptionFired = true; } assertThat(exceptionFired).as("An ApplicationContextException should have been thrown").isTrue(); String output = capturedOutput.toString(); assertThat(output.contains(EXCEPTION_INVALID_TASK_EXECUTION_ID)) .as("Test results do not show the correct exception message: " + output) .isTrue(); } @EnableTask @ImportAutoConfiguration({ SimpleTaskAutoConfiguration.class, PropertyPlaceholderAutoConfiguration.class }) public static class TaskConfiguration { @Bean public CommandLineRunner commandLineRunner() { return new CommandLineRunner() { @Override public void run(String... strings) throws Exception { } }; } } @EnableTask @ImportAutoConfiguration({ SimpleTaskAutoConfiguration.class, PropertyPlaceholderAutoConfiguration.class }) public static class TaskConfigurationWithAnotation { @Bean public CommandLineRunner commandLineRunner() { return new CommandLineRunner() { @Override public void run(String... 
strings) throws Exception { } }; } } @EnableTask @ImportAutoConfiguration({ SimpleTaskAutoConfiguration.class, PropertyPlaceholderAutoConfiguration.class }) public static class TaskExceptionConfiguration { @Bean public CommandLineRunner commandLineRunner() { return new CommandLineRunner() { @Override public void run(String... strings) throws Exception { throw new IllegalStateException(EXCEPTION_MESSAGE); } }; } } } ================================================ FILE: spring-cloud-task-core/src/test/java/org/springframework/cloud/task/TaskRepositoryInitializerDefaultTaskConfigurerTests.java ================================================ /* * Copyright 2018-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.springframework.cloud.task; import java.util.List; import java.util.Map; import javax.sql.DataSource; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.jdbc.autoconfigure.EmbeddedDataSourceConfiguration; import org.springframework.cloud.task.configuration.SimpleTaskAutoConfiguration; import org.springframework.cloud.task.configuration.TaskConfigurer; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.test.annotation.DirtiesContext; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit.jupiter.SpringExtension; import static org.assertj.core.api.AssertionsForInterfaceTypes.assertThat; /** * Verifies that TaskRepositoryInitializer creates tables if a {@link TaskConfigurer} has * a {@link DataSource}. * * @author Glenn Renfro * @since 2.0.0 */ @ExtendWith(SpringExtension.class) @ContextConfiguration(classes = { SimpleTaskAutoConfiguration.class, EmbeddedDataSourceConfiguration.class }) @DirtiesContext public class TaskRepositoryInitializerDefaultTaskConfigurerTests { @Autowired private DataSource dataSource; @Test public void testTablesCreated() { JdbcTemplate jdbcTemplate = new JdbcTemplate(this.dataSource); List> rows = jdbcTemplate.queryForList("SHOW TABLES"); assertThat(rows.size()).isEqualTo(4); } } ================================================ FILE: spring-cloud-task-core/src/test/java/org/springframework/cloud/task/TaskRepositoryInitializerNoDataSourceTaskConfigurerTests.java ================================================ /* * Copyright 2018-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.cloud.task;

import java.util.List;
import java.util.Map;

import javax.sql.DataSource;

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.jdbc.autoconfigure.EmbeddedDataSourceConfiguration;
import org.springframework.cloud.task.configuration.DefaultTaskConfigurer;
import org.springframework.cloud.task.configuration.SimpleTaskAutoConfiguration;
import org.springframework.cloud.task.configuration.SingleTaskConfiguration;
import org.springframework.cloud.task.configuration.TaskConfigurer;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;

import static org.assertj.core.api.AssertionsForInterfaceTypes.assertThat;

/**
 * Verifies that TaskRepositoryInitializer does not create tables if a
 * {@link TaskConfigurer} has no {@link DataSource}.
 *
 * @author Glenn Renfro
 * @since 2.0.0
 */
@ExtendWith(SpringExtension.class)
@ContextConfiguration(classes = { SimpleTaskAutoConfiguration.class, SingleTaskConfiguration.class,
		EmbeddedDataSourceConfiguration.class, DefaultTaskConfigurer.class })
public class TaskRepositoryInitializerNoDataSourceTaskConfigurerTests {

	@Autowired
	private DataSource dataSource;

	@Test
	public void testNoTablesCreated() {
		JdbcTemplate jdbcTemplate = new JdbcTemplate(this.dataSource);
		// Type arguments restored (source showed raw "List>"); each row of the H2
		// "SHOW TABLES" result maps column name to value.
		List<Map<String, Object>> rows = jdbcTemplate.queryForList("SHOW TABLES");
		// No DataSource was handed to the TaskConfigurer, so no tables may be created.
		assertThat(rows.size()).isEqualTo(0);
	}

}
================================================
FILE: spring-cloud-task-core/src/test/java/org/springframework/cloud/task/configuration/DefaultTaskConfigurerTests.java
================================================
/*
 * Copyright 2017-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*/ package org.springframework.cloud.task.configuration; import javax.sql.DataSource; import jakarta.persistence.EntityManager; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.jdbc.autoconfigure.EmbeddedDataSourceConfiguration; import org.springframework.cloud.task.repository.support.SimpleTaskRepository; import org.springframework.cloud.task.repository.support.TaskExecutionDaoFactoryBean; import org.springframework.context.ApplicationContext; import org.springframework.context.annotation.AnnotationConfigApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit.jupiter.SpringExtension; import org.springframework.test.util.ReflectionTestUtils; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.mock; /** * @author Glenn Renfro * @author Mahmoud Ben Hassine */ @ExtendWith(SpringExtension.class) @ContextConfiguration(classes = { EmbeddedDataSourceConfiguration.class }) public class DefaultTaskConfigurerTests { @Autowired DataSource dataSource; @Autowired ApplicationContext context; @Test public void resourcelessTransactionManagerTest() { DefaultTaskConfigurer defaultTaskConfigurer = new DefaultTaskConfigurer(); assertThat(defaultTaskConfigurer.getTransactionManager().getClass().getName()) .isEqualTo("org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager"); defaultTaskConfigurer = new DefaultTaskConfigurer("foo"); assertThat(defaultTaskConfigurer.getTransactionManager().getClass().getName()) .isEqualTo("org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager"); } @Test public void testDefaultContext() { AnnotationConfigApplicationContext 
localContext = new AnnotationConfigApplicationContext(); localContext.register(EmbeddedDataSourceConfiguration.class, EntityManagerConfiguration.class); localContext.refresh(); DefaultTaskConfigurer defaultTaskConfigurer = new DefaultTaskConfigurer(this.dataSource, TaskProperties.DEFAULT_TABLE_PREFIX, localContext); assertThat(defaultTaskConfigurer.getTransactionManager().getClass().getName()) .isEqualTo("org.springframework.orm.jpa.JpaTransactionManager"); } @Test public void dataSourceTransactionManagerTest() { DefaultTaskConfigurer defaultTaskConfigurer = new DefaultTaskConfigurer(this.dataSource); assertThat(defaultTaskConfigurer.getTransactionManager().getClass().getName()) .isEqualTo("org.springframework.jdbc.support.JdbcTransactionManager"); defaultTaskConfigurer = new DefaultTaskConfigurer(this.dataSource, "FOO", null); assertThat(defaultTaskConfigurer.getTransactionManager().getClass().getName()) .isEqualTo("org.springframework.jdbc.support.JdbcTransactionManager"); defaultTaskConfigurer = new DefaultTaskConfigurer(this.dataSource, "FOO", this.context); assertThat(defaultTaskConfigurer.getTransactionManager().getClass().getName()) .isEqualTo("org.springframework.jdbc.support.JdbcTransactionManager"); } @Test public void taskExplorerTest() { DefaultTaskConfigurer defaultTaskConfigurer = new DefaultTaskConfigurer(this.dataSource); assertThat(defaultTaskConfigurer.getTaskExplorer()).isNotNull(); defaultTaskConfigurer = new DefaultTaskConfigurer(); assertThat(defaultTaskConfigurer.getTaskExplorer()).isNotNull(); } @Test public void taskNameResolverTest() { DefaultTaskConfigurer defaultTaskConfigurer = new DefaultTaskConfigurer(this.dataSource); assertThat(defaultTaskConfigurer.getTaskNameResolver()).isNotNull(); defaultTaskConfigurer = new DefaultTaskConfigurer(); assertThat(defaultTaskConfigurer.getTaskNameResolver()).isNotNull(); } @Test public void taskRepositoryTest() { DefaultTaskConfigurer defaultTaskConfigurer = new 
DefaultTaskConfigurer(this.dataSource); assertThat(defaultTaskConfigurer.getTaskRepository()).isNotNull(); defaultTaskConfigurer = new DefaultTaskConfigurer(); assertThat(defaultTaskConfigurer.getTaskRepository()).isNotNull(); } @Test public void taskDataSource() { DefaultTaskConfigurer defaultTaskConfigurer = new DefaultTaskConfigurer(this.dataSource); assertThat(defaultTaskConfigurer.getTaskDataSource()).isNotNull(); defaultTaskConfigurer = new DefaultTaskConfigurer(); assertThat(defaultTaskConfigurer.getTaskDataSource()).isNull(); } @Test public void taskDataSourceWithProperties() { TaskProperties taskProperties = new TaskProperties(); taskProperties.setTablePrefix("foo"); DefaultTaskConfigurer defaultTaskConfigurer = new DefaultTaskConfigurer(this.dataSource, taskProperties); assertThat(defaultTaskConfigurer.getTaskDataSource()).isNotNull(); String prefix = getPrefix(defaultTaskConfigurer); assertThat(prefix).isEqualTo("foo"); System.out.println(prefix); defaultTaskConfigurer = new DefaultTaskConfigurer(); validatePrefix(defaultTaskConfigurer, "TASK_"); defaultTaskConfigurer = new DefaultTaskConfigurer(taskProperties); validatePrefix(defaultTaskConfigurer, "TASK_"); defaultTaskConfigurer = new DefaultTaskConfigurer(this.dataSource); validatePrefix(defaultTaskConfigurer, "TASK_"); defaultTaskConfigurer = new DefaultTaskConfigurer(this.dataSource, taskProperties); validatePrefix(defaultTaskConfigurer, "foo"); defaultTaskConfigurer = new DefaultTaskConfigurer(this.dataSource, new TaskProperties()); validatePrefix(defaultTaskConfigurer, "TASK_"); defaultTaskConfigurer = new DefaultTaskConfigurer(this.dataSource, null, null); validatePrefix(defaultTaskConfigurer, "TASK_"); defaultTaskConfigurer = new DefaultTaskConfigurer(this.dataSource, "bar", null); validatePrefix(defaultTaskConfigurer, "bar"); defaultTaskConfigurer = new DefaultTaskConfigurer(this.dataSource, "bar", null, null); validatePrefix(defaultTaskConfigurer, "bar"); defaultTaskConfigurer = new 
DefaultTaskConfigurer(this.dataSource, "bar", null, taskProperties); validatePrefix(defaultTaskConfigurer, "bar"); } private void validatePrefix(DefaultTaskConfigurer defaultTaskConfigurer, String prefix) { String result = getPrefix(defaultTaskConfigurer); assertThat(result).isEqualTo(prefix); } private String getPrefix(DefaultTaskConfigurer defaultTaskConfigurer) { SimpleTaskRepository taskRepository = (SimpleTaskRepository) ReflectionTestUtils.getField(defaultTaskConfigurer, "taskRepository"); TaskExecutionDaoFactoryBean factoryBean = (TaskExecutionDaoFactoryBean) ReflectionTestUtils .getField(taskRepository, "taskExecutionDaoFactoryBean"); return (String) ReflectionTestUtils.getField(factoryBean, "tablePrefix"); } @Configuration public static class EntityManagerConfiguration { @Bean public EntityManager entityManager() { return mock(EntityManager.class); } } } ================================================ FILE: spring-cloud-task-core/src/test/java/org/springframework/cloud/task/configuration/RepositoryTransactionManagerConfigurationTests.java ================================================ /* * Copyright 2020-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/

package org.springframework.cloud.task.configuration;

import java.time.LocalDateTime;
import java.util.ArrayList;

import javax.sql.DataSource;

import org.junit.jupiter.api.Test;

import org.springframework.boot.autoconfigure.AutoConfigurations;
import org.springframework.boot.autoconfigure.context.PropertyPlaceholderAutoConfiguration;
import org.springframework.boot.test.context.runner.ApplicationContextRunner;
import org.springframework.cloud.task.listener.TaskLifecycleListener;
import org.springframework.cloud.task.repository.TaskExecution;
import org.springframework.cloud.task.repository.TaskRepository;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder;
import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType;
import org.springframework.jdbc.support.JdbcTransactionManager;
import org.springframework.test.jdbc.JdbcTestUtils;
import org.springframework.test.util.ReflectionTestUtils;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.support.DefaultTransactionStatus;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Verifies that a custom transaction manager supplied via a {@link TaskConfigurer} is
 * actually used by the task repository.
 *
 * @author Michael Minella
 * @author Mahmoud Ben Hassine
 */
public class RepositoryTransactionManagerConfigurationTests {

	@Test
	public void testZeroCustomTransactionManagerConfiguration() {
		ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner()
			.withConfiguration(AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class,
					SimpleTaskAutoConfiguration.class, ZeroTransactionManagerConfiguration.class))
			.withPropertyValues("application.name=transactionManagerTask");
		applicationContextRunner.run((context) -> {
			DataSource dataSource = context.getBean("dataSource", DataSource.class);
			// The default transaction manager commits normally, so the row is present.
			int taskExecutionCount = JdbcTestUtils.countRowsInTable(new JdbcTemplate(dataSource), "TASK_EXECUTION");
			assertThat(taskExecutionCount).isEqualTo(1);
		});
	}

	@Test
	public void testSingleCustomTransactionManagerConfiguration() {
		testConfiguration(SingleTransactionManagerConfiguration.class);
	}

	@Test
	public void testMultipleCustomTransactionManagerConfiguration() {
		testConfiguration(MultipleTransactionManagerConfiguration.class);
	}

	// Runs the shared scenario: the custom manager rolls back the first commit, proving
	// the task repository used it. Parameter generified (was a raw Class).
	private void testConfiguration(Class<?> configurationClass) {
		ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner()
			.withConfiguration(AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class,
					SimpleTaskAutoConfiguration.class, configurationClass))
			.withPropertyValues("application.name=transactionManagerTask");
		applicationContextRunner.run((context) -> {
			DataSource dataSource = context.getBean("dataSource", DataSource.class);
			int taskExecutionCount = JdbcTestUtils.countRowsInTable(new JdbcTemplate(dataSource), "TASK_EXECUTION");
			// Verify that the create call was rolled back
			assertThat(taskExecutionCount).isEqualTo(0);
			// Execute a new create call so that things close cleanly
			TaskRepository taskRepository = context.getBean("taskRepository", TaskRepository.class);
			TaskExecution taskExecution = taskRepository.createTaskExecution("transactionManagerTask");
			taskExecution = taskRepository.startTaskExecution(taskExecution.getExecutionId(),
					taskExecution.getTaskName(), LocalDateTime.now(), new ArrayList<>(0), null);
			TaskLifecycleListener listener = context.getBean(TaskLifecycleListener.class);
			ReflectionTestUtils.setField(listener, "taskExecution", taskExecution);
		});
	}

	@EnableTask
	@Configuration
	public static class ZeroTransactionManagerConfiguration {

		@Bean
		public TaskConfigurer taskConfigurer(DataSource dataSource) {
			return new DefaultTaskConfigurer(dataSource);
		}

		@Bean
		public DataSource dataSource() {
			return new EmbeddedDatabaseBuilder().setType(EmbeddedDatabaseType.H2).build();
		}

	}

	@EnableTask
	@Configuration
	public static class SingleTransactionManagerConfiguration {

		@Bean
		public TaskConfigurer taskConfigurer(DataSource dataSource, PlatformTransactionManager transactionManager) {
			return new DefaultTaskConfigurer(dataSource) {
				@Override
				public PlatformTransactionManager getTransactionManager() {
					return transactionManager;
				}
			};
		}

		@Bean
		public DataSource dataSource() {
			return new EmbeddedDatabaseBuilder().setType(EmbeddedDatabaseType.H2).build();
		}

		@Bean
		public JdbcTransactionManager transactionManager(DataSource dataSource) {
			return new TestJdbcTransactionManager(dataSource);
		}

	}

	@EnableTask
	@Configuration
	public static class MultipleTransactionManagerConfiguration {

		@Bean
		public TaskConfigurer taskConfigurer(DataSource dataSource, PlatformTransactionManager transactionManager) {
			return new DefaultTaskConfigurer(dataSource) {
				@Override
				public PlatformTransactionManager getTransactionManager() {
					return transactionManager;
				}
			};
		}

		@Bean
		public DataSource dataSource() {
			return new EmbeddedDatabaseBuilder().setType(EmbeddedDatabaseType.H2).build();
		}

		@Bean
		public JdbcTransactionManager transactionManager(DataSource dataSource) {
			return new TestJdbcTransactionManager(dataSource);
		}

		@Bean
		public DataSource dataSource2() {
			return new EmbeddedDatabaseBuilder().setType(EmbeddedDatabaseType.H2).build();
		}

		@Bean
		public JdbcTransactionManager transactionManager2(DataSource dataSource2) {
			return new JdbcTransactionManager(dataSource2);
		}

	}

	private static class TestJdbcTransactionManager extends JdbcTransactionManager {

		// Number of commit attempts observed so far; field moved before the constructor
		// for conventional member ordering.
		private int count = 0;

		protected TestJdbcTransactionManager(DataSource dataSource) {
			super(dataSource);
		}

		@Override
		protected void doCommit(DefaultTransactionStatus status) {
			if (this.count == 0) {
				// Rollback the finish of the task
				super.doRollback(status);
			}
			else {
				// Commit the start of the task
				super.doCommit(status);
			}
			this.count++;
		}

	}

}
================================================
FILE: spring-cloud-task-core/src/test/java/org/springframework/cloud/task/configuration/TaskPropertiesTests.java
================================================ /* * Copyright 2017-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.configuration; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.test.annotation.DirtiesContext; import org.springframework.test.context.junit.jupiter.SpringExtension; import static org.assertj.core.api.Assertions.assertThat; @DirtiesContext @ExtendWith(SpringExtension.class) @SpringBootTest(classes = { SimpleTaskAutoConfiguration.class, SingleTaskConfiguration.class }, properties = { "spring.cloud.task.closecontextEnabled=false", "spring.cloud.task.initialize-enabled=false" }) public class TaskPropertiesTests { @Autowired TaskProperties taskProperties; @Test public void test() { assertThat(this.taskProperties.getClosecontextEnabled()).isFalse(); assertThat(this.taskProperties.isInitializeEnabled()).isFalse(); } } ================================================ FILE: spring-cloud-task-core/src/test/java/org/springframework/cloud/task/configuration/TestConfiguration.java ================================================ /* * Copyright 2015-present the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.configuration; import javax.sql.DataSource; import org.springframework.batch.infrastructure.support.transaction.ResourcelessTransactionManager; import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.cloud.task.repository.TaskExplorer; import org.springframework.cloud.task.repository.TaskRepository; import org.springframework.cloud.task.repository.support.SimpleTaskExplorer; import org.springframework.cloud.task.repository.support.SimpleTaskRepository; import org.springframework.cloud.task.repository.support.TaskExecutionDaoFactoryBean; import org.springframework.cloud.task.repository.support.TaskRepositoryInitializer; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.core.io.ResourceLoader; import org.springframework.jdbc.support.JdbcTransactionManager; import org.springframework.transaction.PlatformTransactionManager; /** * @author Michael Minella * @author Mahmoud Ben Hassine */ @Configuration public class TestConfiguration implements InitializingBean { @Autowired(required = false) private DataSource dataSource; @Autowired(required = false) private ResourceLoader resourceLoader; private TaskExecutionDaoFactoryBean taskExecutionDaoFactoryBean; @Bean public TaskRepositoryInitializer taskRepositoryInitializer() 
throws Exception { TaskRepositoryInitializer taskRepositoryInitializer = new TaskRepositoryInitializer(new TaskProperties()); taskRepositoryInitializer.setDataSource(this.dataSource); taskRepositoryInitializer.setResourceLoader(this.resourceLoader); taskRepositoryInitializer.afterPropertiesSet(); return taskRepositoryInitializer; } @Bean public TaskExplorer taskExplorer() { return new SimpleTaskExplorer(this.taskExecutionDaoFactoryBean); } @Bean public TaskRepository taskRepository() { return new SimpleTaskRepository(this.taskExecutionDaoFactoryBean); } @Bean public PlatformTransactionManager transactionManager() { if (this.dataSource == null) { return new ResourcelessTransactionManager(); } else { return new JdbcTransactionManager(this.dataSource); } } @Override public void afterPropertiesSet() { if (this.dataSource != null) { this.taskExecutionDaoFactoryBean = new TaskExecutionDaoFactoryBean(this.dataSource); } else { this.taskExecutionDaoFactoryBean = new TaskExecutionDaoFactoryBean(); } } } ================================================ FILE: spring-cloud-task-core/src/test/java/org/springframework/cloud/task/configuration/observation/ObservationIntegrationTests.java ================================================ /* * Copyright 2017-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.springframework.cloud.task.configuration.observation; import java.util.List; import java.util.stream.Collectors; import brave.sampler.Sampler; import brave.test.TestSpanHandler; import io.micrometer.common.KeyValues; import io.micrometer.core.instrument.MeterRegistry; import io.micrometer.core.tck.MeterRegistryAssert; import io.micrometer.tracing.Tracer; import io.micrometer.tracing.brave.bridge.BraveFinishedSpan; import io.micrometer.tracing.exporter.FinishedSpan; import io.micrometer.tracing.test.simple.SpansAssert; import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.ApplicationRunner; import org.springframework.boot.CommandLineRunner; import org.springframework.boot.autoconfigure.ImportAutoConfiguration; import org.springframework.boot.micrometer.metrics.autoconfigure.CompositeMeterRegistryAutoConfiguration; import org.springframework.boot.micrometer.metrics.autoconfigure.MetricsAutoConfiguration; import org.springframework.boot.micrometer.observation.autoconfigure.ObservationAutoConfiguration; import org.springframework.boot.micrometer.tracing.autoconfigure.MicrometerTracingAutoConfiguration; import org.springframework.boot.micrometer.tracing.brave.autoconfigure.BraveAutoConfiguration; import org.springframework.boot.micrometer.tracing.test.autoconfigure.AutoConfigureTracing; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.boot.zipkin.autoconfigure.ZipkinAutoConfiguration; import org.springframework.cloud.task.configuration.EnableTask; import org.springframework.cloud.task.configuration.SimpleTaskAutoConfiguration; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; @AutoConfigureTracing @SpringBootTest(classes = ObservationIntegrationTests.Config.class) class ObservationIntegrationTests { @Autowired 
TestSpanHandler testSpanHandler; @Autowired MeterRegistry meterRegistry; @Test void testSuccessfulObservation() { List finishedSpans = finishedSpans(); SpansAssert.then(finishedSpans) .thenASpanWithNameEqualTo("myCommandLineRunner") .hasTag("spring.cloud.task.runner.bean-name", "myCommandLineRunner") .backToSpans() .thenASpanWithNameEqualTo("myApplicationRunner") .hasTag("spring.cloud.task.runner.bean-name", "myApplicationRunner"); MeterRegistryAssert.then(this.meterRegistry) .hasTimerWithNameAndTags("spring.cloud.task.runner", KeyValues.of("spring.cloud.task.runner.bean-name", "myCommandLineRunner")) .hasTimerWithNameAndTags("spring.cloud.task.runner", KeyValues.of("spring.cloud.task.runner.bean-name", "myApplicationRunner")); } private List finishedSpans() { return this.testSpanHandler.spans().stream().map(BraveFinishedSpan::fromBrave).collect(Collectors.toList()); } @Configuration @EnableTask @ImportAutoConfiguration({ SimpleTaskAutoConfiguration.class, ObservationAutoConfiguration.class, ObservationTaskAutoConfiguration.class, BraveAutoConfiguration.class, MicrometerTracingAutoConfiguration.class, MetricsAutoConfiguration.class, CompositeMeterRegistryAutoConfiguration.class, ZipkinAutoConfiguration.class }) static class Config { private static final Logger log = LoggerFactory.getLogger(Config.class); @Bean TestSpanHandler testSpanHandler() { return new TestSpanHandler(); } @Bean Sampler sampler() { return Sampler.ALWAYS_SAMPLE; } @Bean CommandLineRunner myCommandLineRunner(Tracer tracer) { return args -> log.info(" Hello from command line runner", tracer.currentSpan().context().traceId()); } @Bean ApplicationRunner myApplicationRunner(Tracer tracer) { return args -> log.info(" Hello from application runner", tracer.currentSpan().context().traceId()); } } } ================================================ FILE: spring-cloud-task-core/src/test/java/org/springframework/cloud/task/listener/TaskExceptionTests.java ================================================ /* 
* Copyright 2016-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.cloud.task.listener;

import org.junit.jupiter.api.Test;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Verifies that {@code TaskException} and {@code TaskExecutionException} carry the
 * supplied message and cause through both of their constructors.
 *
 * @author Glenn Renfro
 */
public class TaskExceptionTests {

	private static final String ERROR_MESSAGE = "ERROR MESSAGE";

	@Test
	public void testTaskException() {
		// Message-only constructor keeps the message intact.
		TaskException messageOnly = new TaskException(ERROR_MESSAGE);
		assertThat(messageOnly.getMessage()).isEqualTo(ERROR_MESSAGE);

		// Message-and-cause constructor retains both message and cause.
		TaskException withCause = new TaskException(ERROR_MESSAGE, new IllegalStateException(ERROR_MESSAGE));
		assertThat(withCause.getMessage()).isEqualTo(ERROR_MESSAGE);
		assertThat(withCause.getCause()).isNotNull();
		assertThat(withCause.getCause().getMessage()).isEqualTo(ERROR_MESSAGE);
	}

	@Test
	public void testTaskExecutionException() {
		// Message-only constructor keeps the message intact.
		TaskExecutionException messageOnly = new TaskExecutionException(ERROR_MESSAGE);
		assertThat(messageOnly.getMessage()).isEqualTo(ERROR_MESSAGE);

		// Message-and-cause constructor retains both message and cause.
		TaskExecutionException withCause = new TaskExecutionException(ERROR_MESSAGE,
				new IllegalStateException(ERROR_MESSAGE));
		assertThat(withCause.getMessage()).isEqualTo(ERROR_MESSAGE);
		assertThat(withCause.getCause()).isNotNull();
		assertThat(withCause.getCause().getMessage()).isEqualTo(ERROR_MESSAGE);
	}

}
================================================
FILE: spring-cloud-task-core/src/test/java/org/springframework/cloud/task/listener/TaskExecutionListenerTests.java
================================================
/*
 * Copyright 2016-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.cloud.task.listener;

import java.time.Duration;
import java.time.LocalDateTime;
import java.util.ArrayList;

import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.context.PropertyPlaceholderAutoConfiguration;
import org.springframework.boot.context.event.ApplicationFailedEvent;
import org.springframework.boot.context.event.ApplicationReadyEvent;
import org.springframework.cloud.task.listener.annotation.AfterTask;
import org.springframework.cloud.task.listener.annotation.BeforeTask;
import org.springframework.cloud.task.listener.annotation.FailedTask;
import org.springframework.cloud.task.repository.TaskExecution;
import org.springframework.cloud.task.util.TestDefaultConfiguration;
import org.springframework.cloud.task.util.TestListener;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Verifies that the TaskExecutionListener invocations occur at the appropriate task
 * lifecycle stages.
*
 * @author Glenn Renfro
 */
public class TaskExecutionListenerTests {

	private static final String EXCEPTION_MESSAGE = "This was expected";

	// Static so the error-listener inner classes (separate beans) can record which
	// callbacks fired; reset before every test to avoid cross-test leakage.
	private static boolean beforeTaskDidFireOnError = false;

	private static boolean endTaskDidFireOnError = false;

	private static boolean failedTaskDidFireOnError = false;

	private AnnotationConfigApplicationContext context;

	@BeforeEach
	public void setup() {
		// FIX: was annotated @BeforeTask, a Spring Cloud Task listener annotation that
		// never fires on a plain JUnit test class, so the static flags were never reset
		// between tests. @BeforeEach runs this before every test method as intended.
		beforeTaskDidFireOnError = false;
		endTaskDidFireOnError = false;
		failedTaskDidFireOnError = false;
	}

	@AfterEach
	public void tearDown() {
		if (this.context != null && this.context.isActive()) {
			this.context.close();
		}
	}

	/**
	 * Verify that if a TaskExecutionListener Bean is present that the onTaskStartup
	 * method is called.
	 */
	@Test
	public void testTaskCreate() {
		setupContextForTaskExecutionListener();
		DefaultTaskListenerConfiguration.TestTaskExecutionListener taskExecutionListener = this.context
			.getBean(DefaultTaskListenerConfiguration.TestTaskExecutionListener.class);
		TaskExecution taskExecution = new TaskExecution(0, null, "wombat", LocalDateTime.now(), LocalDateTime.now(),
				null, new ArrayList<>(), null, null);
		verifyListenerResults(false, false, taskExecution, taskExecutionListener);
	}

	/**
	 * Verify that the LifecycleProcessor executes all TaskExecutionListeners when the
	 * BeforeTask callback throws an exception.
	 */
	@Test
	public void testBeforeTaskErrorCreate() {
		boolean exceptionFired = false;
		try {
			setupContextForBeforeTaskErrorAnnotatedListener();
		}
		catch (Exception exception) {
			exceptionFired = true;
		}
		assertThat(exceptionFired).as("Exception should have fired").isTrue();
		assertThat(beforeTaskDidFireOnError).as("BeforeTask Listener should have executed").isTrue();
		assertThat(endTaskDidFireOnError).as("EndTask Listener should have executed").isTrue();
		assertThat(failedTaskDidFireOnError).as("FailedTask Listener should have executed").isTrue();
	}

	/**
	 * Verify that the LifecycleProcessor still executes the AfterTask
	 * TaskExecutionListeners when the FailedTask callback throws an exception.
	 */
	@Test
	public void testFailedTaskErrorCreate() {
		boolean exceptionFired = false;
		try {
			setupContextForFailedTaskErrorAnnotatedListener();
		}
		catch (Exception exception) {
			exceptionFired = true;
		}
		assertThat(exceptionFired).as("Exception should have fired").isTrue();
		assertThat(endTaskDidFireOnError).as("EndTask Listener should have executed").isTrue();
		// FIX: the assertion description contradicted the isTrue() check — the
		// FailedTask listener does execute (it sets the flag before throwing).
		assertThat(failedTaskDidFireOnError).as("FailedTask Listener should have executed").isTrue();
	}

	/**
	 * Verify that the LifecycleProcessor stores the correct exit/error information when
	 * an AfterTask listener fails.
	 */
	@Test
	public void testAfterTaskErrorCreate() {
		setupContextForAfterTaskErrorAnnotatedListener();
		AfterTaskErrorAnnotationConfiguration.AnnotatedTaskListener taskExecutionListener = this.context
			.getBean(AfterTaskErrorAnnotationConfiguration.AnnotatedTaskListener.class);
		this.context.publishEvent(new ApplicationReadyEvent(new SpringApplication(), new String[0], this.context,
				Duration.ofSeconds(50)));
		assertThat(taskExecutionListener.isTaskStartup()).isTrue();
		assertThat(taskExecutionListener.isTaskEnd()).isTrue();
		assertThat(taskExecutionListener.getTaskExecution().getExitMessage()).isEqualTo(TestListener.END_MESSAGE);
		assertThat(taskExecutionListener.getTaskExecution()
			.getErrorMessage()
			.contains("Failed to process @BeforeTask or @AfterTask annotation because: AfterTaskFailure")).isTrue();
		assertThat(taskExecutionListener.getThrowable()).isNull();
	}

	/**
	 * Verify that if a TaskExecutionListener Bean is present that the onTaskEnd method is
	 * called.
	 */
	@Test
	public void testTaskUpdate() {
		setupContextForTaskExecutionListener();
		DefaultTaskListenerConfiguration.TestTaskExecutionListener taskExecutionListener = this.context
			.getBean(DefaultTaskListenerConfiguration.TestTaskExecutionListener.class);
		this.context.publishEvent(new ApplicationReadyEvent(new SpringApplication(), new String[0], this.context,
				Duration.ofSeconds(50)));
		TaskExecution taskExecution = new TaskExecution(0, 0, "wombat", LocalDateTime.now(), LocalDateTime.now(), null,
				new ArrayList<>(), null, null);
		verifyListenerResults(true, false, taskExecution, taskExecutionListener);
	}

	/**
	 * Verify that if a TaskExecutionListener Bean is present that the onTaskFailed method
	 * is called.
	 */
	@Test
	public void testTaskFail() {
		RuntimeException exception = new RuntimeException(EXCEPTION_MESSAGE);
		setupContextForTaskExecutionListener();
		SpringApplication application = new SpringApplication();
		DefaultTaskListenerConfiguration.TestTaskExecutionListener taskExecutionListener = this.context
			.getBean(DefaultTaskListenerConfiguration.TestTaskExecutionListener.class);
		this.context.publishEvent(new ApplicationFailedEvent(application, new String[0], this.context, exception));
		this.context
			.publishEvent(new ApplicationReadyEvent(application, new String[0], this.context, Duration.ofSeconds(50)));
		TaskExecution taskExecution = new TaskExecution(0, 1, "wombat", LocalDateTime.now(), LocalDateTime.now(), null,
				new ArrayList<>(), null, null);
		verifyListenerResults(true, true, taskExecution, taskExecutionListener);
	}

	/**
	 * Verify that if a bean has a @BeforeTask annotation present that the associated
	 * method is called.
	 */
	@Test
	public void testAnnotationCreate() {
		setupContextForAnnotatedListener();
		DefaultAnnotationConfiguration.AnnotatedTaskListener annotatedListener = this.context
			.getBean(DefaultAnnotationConfiguration.AnnotatedTaskListener.class);
		TaskExecution taskExecution = new TaskExecution(0, null, "wombat", LocalDateTime.now(), LocalDateTime.now(),
				null, new ArrayList<>(), null, null);
		verifyListenerResults(false, false, taskExecution, annotatedListener);
	}

	/**
	 * Verify that if a bean has a @AfterTask annotation present that the associated
	 * method is called.
	 */
	@Test
	public void testAnnotationUpdate() {
		setupContextForAnnotatedListener();
		DefaultAnnotationConfiguration.AnnotatedTaskListener annotatedListener = this.context
			.getBean(DefaultAnnotationConfiguration.AnnotatedTaskListener.class);
		this.context.publishEvent(new ApplicationReadyEvent(new SpringApplication(), new String[0], this.context,
				Duration.ofSeconds(50)));
		TaskExecution taskExecution = new TaskExecution(0, 0, "wombat", LocalDateTime.now(), LocalDateTime.now(), null,
				new ArrayList<>(), null, null);
		verifyListenerResults(true, false, taskExecution, annotatedListener);
	}

	/**
	 * Verify that if a bean has a @FailedTask annotation present that the associated
	 * method is called.
	 */
	@Test
	public void testAnnotationFail() {
		RuntimeException exception = new RuntimeException(EXCEPTION_MESSAGE);
		setupContextForAnnotatedListener();
		SpringApplication application = new SpringApplication();
		DefaultAnnotationConfiguration.AnnotatedTaskListener annotatedListener = this.context
			.getBean(DefaultAnnotationConfiguration.AnnotatedTaskListener.class);
		this.context.publishEvent(new ApplicationFailedEvent(application, new String[0], this.context, exception));
		this.context
			.publishEvent(new ApplicationReadyEvent(application, new String[0], this.context, Duration.ofSeconds(50)));
		TaskExecution taskExecution = new TaskExecution(0, 1, "wombat", LocalDateTime.now(), LocalDateTime.now(), null,
				new ArrayList<>(), null, null);
		verifyListenerResults(true, true, taskExecution, annotatedListener);
	}

	/**
	 * Asserts that the listener observed the expected lifecycle callbacks and that its
	 * recorded TaskExecution matches the expected one.
	 * @param isTaskEnd whether onTaskEnd/@AfterTask is expected to have fired
	 * @param isTaskFailed whether onTaskFailed/@FailedTask is expected to have fired
	 * @param taskExecution the expected execution values
	 * @param actualListener the listener under inspection
	 */
	private void verifyListenerResults(boolean isTaskEnd, boolean isTaskFailed, TaskExecution taskExecution,
			TestListener actualListener) {
		assertThat(actualListener.isTaskStartup()).isTrue();
		assertThat(actualListener.isTaskEnd()).isEqualTo(isTaskEnd);
		assertThat(actualListener.isTaskFailed()).isEqualTo(isTaskFailed);
		if (isTaskFailed) {
			assertThat(actualListener.getTaskExecution().getExitMessage()).isEqualTo(TestListener.END_MESSAGE);
			assertThat(actualListener.getThrowable()).isNotNull();
			assertThat(actualListener.getThrowable() instanceof RuntimeException).isTrue();
			assertThat(actualListener.getTaskExecution()
				.getErrorMessage()
				.startsWith("java.lang.RuntimeException: This was expected")).isTrue();
		}
		else if (isTaskEnd) {
			assertThat(actualListener.getTaskExecution().getExitMessage()).isEqualTo(TestListener.END_MESSAGE);
			assertThat(actualListener.getTaskExecution().getErrorMessage()).isEqualTo(taskExecution.getErrorMessage());
			assertThat(actualListener.getThrowable()).isNull();
		}
		else {
			assertThat(actualListener.getTaskExecution().getExitMessage()).isEqualTo(TestListener.START_MESSAGE);
			assertThat(actualListener.getTaskExecution().getErrorMessage()).isNull();
			assertThat(actualListener.getThrowable()).isNull();
		}
		assertThat(actualListener.getTaskExecution().getExecutionId()).isEqualTo(taskExecution.getExecutionId());
		assertThat(actualListener.getTaskExecution().getExitCode()).isEqualTo(taskExecution.getExitCode());
		assertThat(actualListener.getTaskExecution().getExternalExecutionId())
			.isEqualTo(taskExecution.getExternalExecutionId());
	}

	private void setupContextForTaskExecutionListener() {
		this.context = new AnnotationConfigApplicationContext(DefaultTaskListenerConfiguration.class,
				TestDefaultConfiguration.class, PropertyPlaceholderAutoConfiguration.class);
		this.context.setId("testTask");
	}

	private void setupContextForAnnotatedListener() {
		this.context = new AnnotationConfigApplicationContext(TestDefaultConfiguration.class,
				DefaultAnnotationConfiguration.class, PropertyPlaceholderAutoConfiguration.class);
		this.context.setId("annotatedTask");
	}

	private void setupContextForBeforeTaskErrorAnnotatedListener() {
		this.context = new AnnotationConfigApplicationContext(TestDefaultConfiguration.class,
				BeforeTaskErrorAnnotationConfiguration.class, PropertyPlaceholderAutoConfiguration.class);
		this.context.setId("beforeTaskAnnotatedTask");
	}

	private void setupContextForFailedTaskErrorAnnotatedListener() {
		this.context = new AnnotationConfigApplicationContext(TestDefaultConfiguration.class,
				FailedTaskErrorAnnotationConfiguration.class, PropertyPlaceholderAutoConfiguration.class);
		this.context.setId("failedTaskAnnotatedTask");
	}

	private void setupContextForAfterTaskErrorAnnotatedListener() {
		this.context = new AnnotationConfigApplicationContext(TestDefaultConfiguration.class,
				AfterTaskErrorAnnotationConfiguration.class, PropertyPlaceholderAutoConfiguration.class);
		this.context.setId("afterTaskAnnotatedTask");
	}

	@Configuration
	public static class DefaultAnnotationConfiguration {

		@Bean
		public AnnotatedTaskListener annotatedTaskListener() {
			return new AnnotatedTaskListener();
		}

		public static class AnnotatedTaskListener extends TestListener {

			@BeforeTask
			public void methodA(TaskExecution taskExecution) {
				this.isTaskStartup = true;
				this.taskExecution = taskExecution;
				this.taskExecution.setExitMessage(START_MESSAGE);
			}

			@AfterTask
			public void methodB(TaskExecution taskExecution) {
				this.isTaskEnd = true;
				this.taskExecution = taskExecution;
				this.taskExecution.setExitMessage(END_MESSAGE);
			}

			@FailedTask
			public void methodC(TaskExecution taskExecution, Throwable throwable) {
				this.isTaskFailed = true;
				this.taskExecution = taskExecution;
				this.throwable = throwable;
				this.taskExecution.setExitMessage(ERROR_MESSAGE);
			}

		}

	}

	@Configuration
	public static class BeforeTaskErrorAnnotationConfiguration {

		@Bean
		public AnnotatedTaskListener annotatedTaskListener() {
			return new AnnotatedTaskListener();
		}

		@Bean
		public CommandLineRunner commandLineRunner() {
			return args -> System.out.println("I was run");
		}

		public static class AnnotatedTaskListener {

			@BeforeTask
			public void methodA(TaskExecution taskExecution) {
				beforeTaskDidFireOnError = true;
				throw new TaskExecutionException("BeforeTaskFailure");
			}

			@AfterTask
			public void methodB(TaskExecution taskExecution) {
				endTaskDidFireOnError = true;
			}

			@FailedTask
			public void methodC(TaskExecution taskExecution, Throwable throwable) {
				failedTaskDidFireOnError = true;
			}

		}

	}

	@Configuration
	public static class FailedTaskErrorAnnotationConfiguration {

		@Bean
		public AnnotatedTaskListener annotatedTaskListener() {
			return new AnnotatedTaskListener();
		}

		public static class AnnotatedTaskListener {

			@BeforeTask
			public void methodA(TaskExecution taskExecution) {
				beforeTaskDidFireOnError = true;
				throw new TaskExecutionException("BeforeTaskFailure");
			}

			@AfterTask
			public void methodB(TaskExecution taskExecution) {
				endTaskDidFireOnError = true;
			}

			@FailedTask
			public void methodC(TaskExecution taskExecution, Throwable throwable) {
				failedTaskDidFireOnError = true;
				throw new TaskExecutionException("FailedTaskFailure");
			}

		}

	}

	@Configuration
	public static class AfterTaskErrorAnnotationConfiguration {

		@Bean
		public AnnotatedTaskListener annotatedTaskListener() {
			return new AnnotatedTaskListener();
		}

		public static class AnnotatedTaskListener extends TestListener {

			@BeforeTask
			public void methodA(TaskExecution taskExecution) {
				this.isTaskStartup = true;
			}

			@AfterTask
			public void methodB(TaskExecution taskExecution) {
				this.isTaskEnd = true;
				this.taskExecution = taskExecution;
				this.taskExecution.setExitMessage(END_MESSAGE);
				throw new TaskExecutionException("AfterTaskFailure");
			}

		}

	}

	@Configuration
	public static class DefaultTaskListenerConfiguration {

		@Bean
		public TestTaskExecutionListener taskExecutionListener() {
			return new TestTaskExecutionListener();
		}

		public static class TestTaskExecutionListener extends TestListener implements TaskExecutionListener {

			@Override
			public void onTaskStartup(TaskExecution taskExecution) {
				this.isTaskStartup = true;
				this.taskExecution = taskExecution;
				this.taskExecution.setExitMessage(START_MESSAGE);
			}

			@Override
			public void onTaskEnd(TaskExecution taskExecution) {
				this.isTaskEnd = true;
				this.taskExecution = taskExecution;
				this.taskExecution.setExitMessage(END_MESSAGE);
			}

			@Override
			public void onTaskFailed(TaskExecution taskExecution, Throwable throwable) {
				this.isTaskFailed = true;
				this.taskExecution = taskExecution;
				this.throwable = throwable;
				this.taskExecution.setExitMessage(ERROR_MESSAGE);
			}

		}

	}

}
================================================
FILE: spring-cloud-task-core/src/test/java/org/springframework/cloud/task/listener/TaskLifecycleListenerTests.java
================================================
/*
 * Copyright 2015-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.listener; import java.time.Duration; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import io.micrometer.observation.Observation; import io.micrometer.observation.ObservationRegistry; import io.micrometer.observation.tck.TestObservationRegistry; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.boot.ApplicationArguments; import org.springframework.boot.ExitCodeEvent; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.context.PropertyPlaceholderAutoConfiguration; import org.springframework.boot.context.event.ApplicationFailedEvent; import org.springframework.boot.context.event.ApplicationReadyEvent; import org.springframework.boot.test.system.CapturedOutput; import org.springframework.boot.test.system.OutputCaptureExtension; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.cloud.task.repository.TaskExplorer; import org.springframework.cloud.task.util.TestDefaultConfiguration; import org.springframework.context.ApplicationContextException; import org.springframework.context.ConfigurableApplicationContext; import org.springframework.context.annotation.AnnotationConfigApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; 
import org.springframework.core.env.ConfigurableEnvironment;
import org.springframework.core.env.MapPropertySource;
import org.springframework.core.env.MutablePropertySources;
import org.springframework.core.env.StandardEnvironment;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Sort;

import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;

/**
 * Verifies that the TaskLifecycleListener methods record the appropriate log header
 * entries and result codes.
 *
 * @author Glenn Renfro
 * @author Michael Minella
 */
@ExtendWith(OutputCaptureExtension.class)
public class TaskLifecycleListenerTests {

	private AnnotationConfigApplicationContext context;

	private TaskExplorer taskExplorer;

	@BeforeEach
	public void setUp() {
		this.context = new AnnotationConfigApplicationContext();
		this.context.setId("testTask");
		this.context.register(TestDefaultConfiguration.class, PropertyPlaceholderAutoConfiguration.class);
		// Reset the static ordering lists shared by the TestListener beans.
		TestListener.getStartupOrderList().clear();
		TestListener.getFailOrderList().clear();
		TestListener.getEndOrderList().clear();
	}

	@AfterEach
	public void tearDown() {
		if (this.context != null && this.context.isActive()) {
			this.context.close();
		}
		// NOTE(review): this registry is created fresh here, so getCurrentObservationScope()
		// is always null and the block below never executes — it cannot clean up a scope
		// opened on a different registry. Confirm intent before removing.
		ObservationRegistry observationRegistry = TestObservationRegistry.create();
		if (observationRegistry.getCurrentObservationScope() != null) {
			Observation.Scope scope = observationRegistry.getCurrentObservationScope();
			scope.close();
			scope.getCurrentObservation().stop();
		}
	}

	@Test
	public void testTaskCreate() {
		this.context.refresh();
		this.taskExplorer = this.context.getBean(TaskExplorer.class);
		verifyTaskExecution(0, false);
	}

	@Test
	public void testTaskCreateWithArgs() {
		this.context.register(ArgsConfiguration.class);
		this.context.refresh();
		this.taskExplorer = this.context.getBean(TaskExplorer.class);
		verifyTaskExecution(2, false);
	}

	@Test
	public void testTaskUpdate() {
		this.context.refresh();
		this.taskExplorer = this.context.getBean(TaskExplorer.class);
		this.context.publishEvent(new ApplicationReadyEvent(new SpringApplication(), new String[0], this.context,
				Duration.ofSeconds(50)));
		verifyTaskExecution(0, true, 0);
	}

	@Test
	public void testTaskFailedUpdate() {
		this.context.refresh();
		RuntimeException exception = new RuntimeException("This was expected");
		SpringApplication application = new SpringApplication();
		this.taskExplorer = this.context.getBean(TaskExplorer.class);
		this.context.publishEvent(new ApplicationFailedEvent(application, new String[0], this.context, exception));
		this.context
			.publishEvent(new ApplicationReadyEvent(application, new String[0], this.context, Duration.ofSeconds(50)));
		verifyTaskExecution(0, true, 1, exception, null);
	}

	@Test
	public void testTaskFailedWithExitCodeEvent() {
		final int exitCode = 10;
		this.context.register(TestListener.class);
		this.context.register(TestListener2.class);
		this.context.refresh();
		RuntimeException exception = new RuntimeException("This was expected");
		SpringApplication application = new SpringApplication();
		this.taskExplorer = this.context.getBean(TaskExplorer.class);
		this.context.publishEvent(new ExitCodeEvent(this.context, exitCode));
		this.context.publishEvent(new ApplicationFailedEvent(application, new String[0], this.context, exception));
		this.context
			.publishEvent(new ApplicationReadyEvent(application, new String[0], this.context, Duration.ofSeconds(50)));
		verifyTaskExecution(0, true, exitCode, exception, null);
		// Startup callbacks run in reverse registration order; end/fail run in order.
		assertThat(TestListener.getStartupOrderList().size()).isEqualTo(2);
		assertThat(TestListener.getStartupOrderList().get(0)).isEqualTo(Integer.valueOf(2));
		assertThat(TestListener.getStartupOrderList().get(1)).isEqualTo(Integer.valueOf(1));
		assertThat(TestListener.getEndOrderList().size()).isEqualTo(2);
		assertThat(TestListener.getEndOrderList().get(0)).isEqualTo(Integer.valueOf(1));
		assertThat(TestListener.getEndOrderList().get(1)).isEqualTo(Integer.valueOf(2));
		assertThat(TestListener.getFailOrderList().size()).isEqualTo(2);
		assertThat(TestListener.getFailOrderList().get(0)).isEqualTo(Integer.valueOf(1));
		assertThat(TestListener.getFailOrderList().get(1)).isEqualTo(Integer.valueOf(2));
	}

	@Test
	public void testNoClosingOfContext() {
		try (ConfigurableApplicationContext applicationContext = SpringApplication.run(
				new Class[] { TestDefaultConfiguration.class, PropertyPlaceholderAutoConfiguration.class },
				new String[] { "--spring.cloud.task.closecontext_enabled=false" })) {
			assertThat(applicationContext.isActive()).isTrue();
		}
	}

	@Test
	public void testInvalidTaskExecutionId() {
		assertThatExceptionOfType(ApplicationContextException.class).isThrownBy(() -> {
			ConfigurableEnvironment environment = new StandardEnvironment();
			MutablePropertySources propertySources = environment.getPropertySources();
			// FIX: raw Map replaced with Map<String, Object> (MapPropertySource's
			// expected key/value types); same change applied throughout the class.
			Map<String, Object> myMap = new HashMap<>();
			myMap.put("spring.cloud.task.executionid", "55");
			propertySources.addFirst(new MapPropertySource("EnvrionmentTestPropsource", myMap));
			this.context.setEnvironment(environment);
			this.context.refresh();
		});
	}

	@Test
	public void testRestartExistingTask(CapturedOutput capturedOutput) {
		this.context.refresh();
		TaskLifecycleListener taskLifecycleListener = this.context.getBean(TaskLifecycleListener.class);
		taskLifecycleListener.start();
		String output = capturedOutput.toString();
		assertThat(output.contains("Multiple start events have been received"))
			.as("Test results do not show error message: " + output)
			.isTrue();
	}

	@Test
	public void testExternalExecutionId() {
		ConfigurableEnvironment environment = new StandardEnvironment();
		MutablePropertySources propertySources = environment.getPropertySources();
		Map<String, Object> myMap = new HashMap<>();
		myMap.put("spring.cloud.task.external-execution-id", "myid");
		propertySources.addFirst(new MapPropertySource("EnvrionmentTestPropsource", myMap));
		this.context.setEnvironment(environment);
		this.context.refresh();
		this.taskExplorer = this.context.getBean(TaskExplorer.class);
		verifyTaskExecution(0, false, null, null, "myid");
	}

	@Test
	public void testParentExecutionId() {
		ConfigurableEnvironment environment = new StandardEnvironment();
		MutablePropertySources propertySources = environment.getPropertySources();
		Map<String, Object> myMap = new HashMap<>();
		myMap.put("spring.cloud.task.parentExecutionId", 789);
		propertySources.addFirst(new MapPropertySource("EnvrionmentTestPropsource", myMap));
		this.context.setEnvironment(environment);
		this.context.refresh();
		this.taskExplorer = this.context.getBean(TaskExplorer.class);
		verifyTaskExecution(0, false, null, null, null, 789L);
	}

	private void verifyTaskExecution(int numberOfParams, boolean update, Integer exitCode) {
		verifyTaskExecution(numberOfParams, update, exitCode, null, null);
	}

	private void verifyTaskExecution(int numberOfParams, boolean update) {
		verifyTaskExecution(numberOfParams, update, null, null, null);
	}

	private void verifyTaskExecution(int numberOfParams, boolean update, Integer exitCode, Throwable exception,
			String externalExecutionId) {
		verifyTaskExecution(numberOfParams, update, exitCode, exception, externalExecutionId, null);
	}

	/**
	 * Looks up the single recorded execution for "testTask" and asserts its stored
	 * values match the expected argument count, exit code, external/parent ids, and
	 * start/end timestamps.
	 */
	private void verifyTaskExecution(int numberOfParams, boolean update, Integer exitCode, Throwable exception,
			String externalExecutionId, Long parentExecutionId) {
		Sort sort = Sort.by("id");
		PageRequest request = PageRequest.of(0, Integer.MAX_VALUE, sort);
		// FIX: raw Page replaced with Page<TaskExecution>.
		Page<TaskExecution> taskExecutionsByName = this.taskExplorer.findTaskExecutionsByName("testTask", request);
		assertThat(taskExecutionsByName.iterator().hasNext()).isTrue();
		TaskExecution taskExecution = taskExecutionsByName.iterator().next();
		assertThat(taskExecution.getArguments().size()).isEqualTo(numberOfParams);
		assertThat(taskExecution.getExitCode()).isEqualTo(exitCode);
		assertThat(taskExecution.getExternalExecutionId()).isEqualTo(externalExecutionId);
		assertThat(taskExecution.getParentExecutionId()).isEqualTo(parentExecutionId);
		if (exception != null) {
			assertThat(taskExecution.getErrorMessage().length() > exception.getStackTrace().length).isTrue();
		}
		else {
			assertThat(taskExecution.getExitMessage()).isNull();
		}
		if (update) {
			assertThat(taskExecution.getEndTime().isAfter(taskExecution.getStartTime())
					|| taskExecution.getEndTime().isEqual(taskExecution.getStartTime()))
				.isTrue();
			assertThat(taskExecution.getExitCode()).isNotNull();
		}
		else {
			assertThat(taskExecution.getEndTime()).isNull();
			assertThat(taskExecution.getExitCode() == null).isTrue();
		}
		assertThat(taskExecution.getTaskName()).isEqualTo("testTask");
	}

	@Configuration
	public static class ArgsConfiguration {

		@Bean
		public ApplicationArguments args() {
			Map<String, String> args = new HashMap<>(2);
			args.put("foo", "bar");
			args.put("baz", "qux");
			return new SimpleApplicationArgs(args);
		}

	}

	/**
	 * Minimal ApplicationArguments backed by a simple option map; getNonOptionArgs is
	 * intentionally unsupported.
	 */
	private static class SimpleApplicationArgs implements ApplicationArguments {

		private final Map<String, String> args;

		SimpleApplicationArgs(Map<String, String> args) {
			this.args = args;
		}

		@Override
		public String[] getSourceArgs() {
			String[] sourceArgs = new String[this.args.size()];
			int i = 0;
			for (Map.Entry<String, String> stringStringEntry : this.args.entrySet()) {
				sourceArgs[i] = "--" + stringStringEntry.getKey() + "=" + stringStringEntry.getValue();
				i++;
			}
			return sourceArgs;
		}

		@Override
		public Set<String> getOptionNames() {
			return this.args.keySet();
		}

		@Override
		public boolean containsOption(String s) {
			return this.args.containsKey(s);
		}

		@Override
		public List<String> getOptionValues(String s) {
			return Collections.singletonList(this.args.get(s));
		}

		@Override
		public List<String> getNonOptionArgs() {
			throw new UnsupportedOperationException("Not supported at this time.");
		}

	}

	private static final class TestListener2 extends TestListener {

	}

	/**
	 * Listener that records the instance order in which each lifecycle callback fires,
	 * in class-level lists shared across instances.
	 */
	private static class TestListener implements TaskExecutionListener {

		static List<Integer> startupOrderList = new ArrayList<>();

		static List<Integer> endOrderList = new ArrayList<>();

		static List<Integer> failOrderList = new ArrayList<>();

		private static int currentCount = 0;

		private int id = 0;

		TestListener() {
			currentCount++;
			this.id = currentCount;
		}

		public static List<Integer> getStartupOrderList() {
			return startupOrderList;
		}

		public static List<Integer> getEndOrderList() {
			return endOrderList;
		}

		public static List<Integer> getFailOrderList() {
			return failOrderList;
		}

		@Override
		public void onTaskStartup(TaskExecution taskExecution) {
			startupOrderList.add(this.id);
		}

		@Override
		public void onTaskEnd(TaskExecution taskExecution) {
			endOrderList.add(this.id);
		}

		@Override
		public void onTaskFailed(TaskExecution taskExecution, Throwable throwable) {
			failOrderList.add(this.id);
		}

	}

}
================================================
FILE: spring-cloud-task-core/src/test/java/org/springframework/cloud/task/listener/TaskListenerExecutorObjectFactoryTests.java
================================================
/*
 * Copyright 2018-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*/ package org.springframework.cloud.task.listener; import java.util.ArrayList; import java.util.List; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.boot.test.context.runner.ApplicationContextRunner; import org.springframework.cloud.task.listener.annotation.AfterTask; import org.springframework.cloud.task.listener.annotation.BeforeTask; import org.springframework.cloud.task.listener.annotation.FailedTask; import org.springframework.cloud.task.listener.annotation.TaskListenerExecutor; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.context.ConfigurableApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.test.annotation.DirtiesContext; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit.jupiter.SpringExtension; import static org.assertj.core.api.Assertions.assertThat; /** * Verifies that the {@link TaskListenerExecutorObjectFactory} retrieves the * {@link TaskListenerExecutor}. * * @author Glenn Renfro * @author Isik Erhan * @since 2.1.0 */ @ExtendWith(SpringExtension.class) @ContextConfiguration(classes = { TaskListenerExecutorObjectFactoryTests.TaskExecutionListenerConfiguration.class }) @DirtiesContext public class TaskListenerExecutorObjectFactoryTests { /** * Task name constant for the Before TaskListener tests. */ public static final String BEFORE_LISTENER = "BEFORE LISTENER"; /** * Task name constant for the After TaskListener tests. */ public static final String AFTER_LISTENER = "AFTER LISTENER"; /** * Task name constant for the Fail TaskListener tests. */ public static final String FAIL_LISTENER = "FAIL LISTENER"; /** * Collection of the task execution listeners that were fired. 
*/ public static List taskExecutionListenerResults = new ArrayList<>(3); private TaskListenerExecutor taskListenerExecutor; private TaskListenerExecutorObjectFactory taskListenerExecutorObjectFactory; public void setup(ConfigurableApplicationContext context) { taskExecutionListenerResults.clear(); this.taskListenerExecutorObjectFactory = new TaskListenerExecutorObjectFactory(context); this.taskListenerExecutor = this.taskListenerExecutorObjectFactory.getObject(); } @Test public void verifyTaskStartupListener() { ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() .withUserConfiguration(TaskExecutionListenerConfiguration.class); applicationContextRunner.run((context) -> { setup(context); this.taskListenerExecutor.onTaskStartup(createSampleTaskExecution(BEFORE_LISTENER)); validateSingleEntry(BEFORE_LISTENER); }); } @Test public void verifyTaskFailedListener() { ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() .withUserConfiguration(TaskExecutionListenerConfiguration.class); applicationContextRunner.run((context) -> { setup(context); this.taskListenerExecutor.onTaskFailed(createSampleTaskExecution(FAIL_LISTENER), new IllegalStateException("oops")); validateSingleEntry(FAIL_LISTENER); }); } @Test public void verifyTaskEndListener() { ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() .withUserConfiguration(TaskExecutionListenerConfiguration.class); applicationContextRunner.run((context) -> { setup(context); this.taskListenerExecutor.onTaskEnd(createSampleTaskExecution(AFTER_LISTENER)); validateSingleEntry(AFTER_LISTENER); }); } @Test public void verifyAllListener() { ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() .withUserConfiguration(TaskExecutionListenerConfiguration.class); applicationContextRunner.run((context) -> { setup(context); this.taskListenerExecutor.onTaskStartup(createSampleTaskExecution(BEFORE_LISTENER)); 
this.taskListenerExecutor.onTaskFailed(createSampleTaskExecution(FAIL_LISTENER), new IllegalStateException("oops")); this.taskListenerExecutor.onTaskEnd(createSampleTaskExecution(AFTER_LISTENER)); assertThat(taskExecutionListenerResults.size()).isEqualTo(3); assertThat(taskExecutionListenerResults.get(0).getTaskName()).isEqualTo(BEFORE_LISTENER); assertThat(taskExecutionListenerResults.get(1).getTaskName()).isEqualTo(FAIL_LISTENER); assertThat(taskExecutionListenerResults.get(2).getTaskName()).isEqualTo(AFTER_LISTENER); }); } @Test public void verifyTaskStartupListenerWithMultipleInstances() { ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() .withUserConfiguration(TaskExecutionListenerMultipleInstanceConfiguration.class); applicationContextRunner.run((context) -> { setup(context); this.taskListenerExecutor.onTaskStartup(createSampleTaskExecution(BEFORE_LISTENER)); validateSingleEventWithMultipleInstances(BEFORE_LISTENER); }); } @Test public void verifyTaskFailedListenerWithMultipleInstances() { ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() .withUserConfiguration(TaskExecutionListenerMultipleInstanceConfiguration.class); applicationContextRunner.run((context) -> { setup(context); this.taskListenerExecutor.onTaskFailed(createSampleTaskExecution(FAIL_LISTENER), new IllegalStateException("oops")); validateSingleEventWithMultipleInstances(FAIL_LISTENER); }); } @Test public void verifyTaskEndListenerWithMultipleInstances() { ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() .withUserConfiguration(TaskExecutionListenerMultipleInstanceConfiguration.class); applicationContextRunner.run((context) -> { setup(context); this.taskListenerExecutor.onTaskEnd(createSampleTaskExecution(AFTER_LISTENER)); validateSingleEventWithMultipleInstances(AFTER_LISTENER); }); } @Test public void verifyAllListenerWithMultipleInstances() { ApplicationContextRunner applicationContextRunner = new 
ApplicationContextRunner() .withUserConfiguration(TaskExecutionListenerMultipleInstanceConfiguration.class); applicationContextRunner.run((context) -> { setup(context); this.taskListenerExecutor.onTaskStartup(createSampleTaskExecution(BEFORE_LISTENER)); this.taskListenerExecutor.onTaskFailed(createSampleTaskExecution(FAIL_LISTENER), new IllegalStateException("oops")); this.taskListenerExecutor.onTaskEnd(createSampleTaskExecution(AFTER_LISTENER)); assertThat(taskExecutionListenerResults.size()).isEqualTo(6); assertThat(taskExecutionListenerResults.get(0).getTaskName()).isEqualTo(BEFORE_LISTENER); assertThat(taskExecutionListenerResults.get(1).getTaskName()).isEqualTo(BEFORE_LISTENER); assertThat(taskExecutionListenerResults.get(2).getTaskName()).isEqualTo(FAIL_LISTENER); assertThat(taskExecutionListenerResults.get(3).getTaskName()).isEqualTo(FAIL_LISTENER); assertThat(taskExecutionListenerResults.get(4).getTaskName()).isEqualTo(AFTER_LISTENER); assertThat(taskExecutionListenerResults.get(5).getTaskName()).isEqualTo(AFTER_LISTENER); }); } private TaskExecution createSampleTaskExecution(String taskName) { TaskExecution taskExecution = new TaskExecution(); taskExecution.setTaskName(taskName); return taskExecution; } private void validateSingleEntry(String event) { assertThat(taskExecutionListenerResults.size()).isEqualTo(1); assertThat(taskExecutionListenerResults.get(0).getTaskName()).isEqualTo(event); } private void validateSingleEventWithMultipleInstances(String event) { assertThat(taskExecutionListenerResults.size()).isEqualTo(2); assertThat(taskExecutionListenerResults).allSatisfy(task -> assertThat(task.getTaskName()).isEqualTo(event)); } @Configuration public static class TaskExecutionListenerConfiguration { @Bean public TaskRunComponent taskRunComponent() { return new TaskRunComponent(); } } @Configuration public static class TaskExecutionListenerMultipleInstanceConfiguration { @Bean public TaskRunComponent taskRunComponent() { return new TaskRunComponent(); } 
@Bean public TaskRunComponent otherTaskRunComponent() { return new TaskRunComponent(); } } public static class TaskRunComponent { @BeforeTask public void initBeforeListener(TaskExecution taskExecution) { TaskListenerExecutorObjectFactoryTests.taskExecutionListenerResults.add(taskExecution); } @AfterTask public void initAfterListener(TaskExecution taskExecution) { TaskListenerExecutorObjectFactoryTests.taskExecutionListenerResults.add(taskExecution); } @FailedTask public void initFailedListener(TaskExecution taskExecution, Throwable exception) { TaskListenerExecutorObjectFactoryTests.taskExecutionListenerResults.add(taskExecution); } } } ================================================ FILE: spring-cloud-task-core/src/test/java/org/springframework/cloud/task/micrometer/TaskObservationsTests.java ================================================ /* * Copyright 2019-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.springframework.cloud.task.micrometer; import java.time.LocalDateTime; import java.util.ArrayList; import io.micrometer.core.instrument.LongTaskTimer; import io.micrometer.core.instrument.Tags; import io.micrometer.core.instrument.observation.DefaultMeterObservationHandler; import io.micrometer.core.instrument.simple.SimpleMeterRegistry; import io.micrometer.core.tck.MeterRegistryAssert; import io.micrometer.observation.Observation; import io.micrometer.observation.ObservationHandler; import io.micrometer.observation.ObservationRegistry; import io.micrometer.observation.tck.ObservationRegistryAssert; import io.micrometer.observation.tck.TestObservationRegistry; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.springframework.boot.autoconfigure.AutoConfigurations; import org.springframework.boot.test.context.runner.ApplicationContextRunner; import org.springframework.cloud.task.configuration.TaskObservationCloudKeyValues; import org.springframework.cloud.task.listener.TaskExecutionObservation; import org.springframework.cloud.task.listener.TaskObservations; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import static org.assertj.core.api.Assertions.assertThat; import static org.springframework.cloud.task.listener.TaskObservations.UNKNOWN; /** * @author Christian Tzolov * @author Glenn Renfro */ public class TaskObservationsTests { /** * Prefix for the spring cloud task project. 
*/ public static final String PREFIX = "spring.cloud.task"; private TaskObservations taskObservations; private SimpleMeterRegistry simpleMeterRegistry; private ObservationRegistry observationRegistry; @BeforeEach public void before() { this.simpleMeterRegistry = new SimpleMeterRegistry(); this.observationRegistry = TestObservationRegistry.create(); ObservationHandler timerObservationHandler = new DefaultMeterObservationHandler( this.simpleMeterRegistry); this.observationRegistry.observationConfig().observationHandler(timerObservationHandler); this.taskObservations = new TaskObservations(this.observationRegistry, null, null); } @AfterEach public void after() { this.simpleMeterRegistry.clear(); ObservationRegistryAssert.assertThat(this.observationRegistry).doesNotHaveAnyRemainingCurrentObservation(); } @Test public void successfulTaskTest() { TaskExecution taskExecution = startupObservationForBasicTests("myTask72", 123L); LongTaskTimer longTaskTimer = initializeBasicTest("myTask72", "123"); // Finish Task taskObservations.onTaskEnd(taskExecution); verifyDefaultKeyValues(); TaskExecutionObservation.TASK_ACTIVE.getDefaultConvention(); MeterRegistryAssert.assertThat(this.simpleMeterRegistry) .hasTimerWithNameAndTags("spring.cloud.task", Tags .of(TaskExecutionObservation.TaskKeyValues.TASK_STATUS.asString(), TaskObservations.STATUS_SUCCESS)); verifyLongTaskTimerAfterStop(longTaskTimer, "myTask72", "123"); } @Test public void defaultTaskTest() { TaskExecution taskExecution = new TaskExecution(123L, 0, null, LocalDateTime.now(), LocalDateTime.now(), null, new ArrayList<>(), null, null, null); // Start Task taskObservations.onTaskStartup(taskExecution); LongTaskTimer longTaskTimer = initializeBasicTest(UNKNOWN, "123"); // Finish Task taskObservations.onTaskEnd(taskExecution); // Test Timer MeterRegistryAssert.assertThat(this.simpleMeterRegistry) .hasTimerWithNameAndTags(PREFIX, Tags.of(TaskExecutionObservation.TaskKeyValues.TASK_NAME.asString(), UNKNOWN)); 
MeterRegistryAssert.assertThat(this.simpleMeterRegistry) .hasTimerWithNameAndTags(PREFIX, Tags.of(TaskExecutionObservation.TaskKeyValues.TASK_EXECUTION_ID.asString(), "123")); MeterRegistryAssert.assertThat(this.simpleMeterRegistry) .hasTimerWithNameAndTags(PREFIX, Tags.of(TaskExecutionObservation.TaskKeyValues.TASK_PARENT_EXECUTION_ID.asString(), UNKNOWN)); MeterRegistryAssert.assertThat(this.simpleMeterRegistry) .hasTimerWithNameAndTags(PREFIX, Tags.of(TaskExecutionObservation.TaskKeyValues.TASK_EXTERNAL_EXECUTION_ID.asString(), UNKNOWN)); MeterRegistryAssert.assertThat(this.simpleMeterRegistry) .hasTimerWithNameAndTags(PREFIX, Tags.of(TaskExecutionObservation.TaskKeyValues.TASK_EXIT_CODE.asString(), "0")); MeterRegistryAssert.assertThat(this.simpleMeterRegistry) .hasTimerWithNameAndTags(PREFIX, Tags.of(TaskExecutionObservation.TaskKeyValues.TASK_STATUS.asString(), TaskObservations.STATUS_SUCCESS)); verifyLongTaskTimerAfterStop(longTaskTimer, "unknown", "123"); } @Test public void failingTask() { TaskExecution taskExecution = startupObservationForBasicTests("myTask72", 123L); LongTaskTimer longTaskTimer = initializeBasicTest("myTask72", "123"); taskObservations.onTaskFailed(new RuntimeException("Test")); // Finish Task. TaskLifecycleListen calls onTaskEnd after the onTaskFailed. Make // sure that the counter status // is not affected by this. 
taskExecution.setExitCode(1); taskObservations.onTaskEnd(taskExecution); MeterRegistryAssert.assertThat(this.simpleMeterRegistry) .hasTimerWithNameAndTags(PREFIX, Tags.of(TaskExecutionObservation.TaskKeyValues.TASK_NAME.asString(), "myTask72")); MeterRegistryAssert.assertThat(this.simpleMeterRegistry) .hasTimerWithNameAndTags(PREFIX, Tags.of(TaskExecutionObservation.TaskKeyValues.TASK_EXECUTION_ID.asString(), "123")); MeterRegistryAssert.assertThat(this.simpleMeterRegistry) .hasTimerWithNameAndTags(PREFIX, Tags.of(TaskExecutionObservation.TaskKeyValues.TASK_PARENT_EXECUTION_ID.asString(), "-1")); MeterRegistryAssert.assertThat(this.simpleMeterRegistry) .hasTimerWithNameAndTags(PREFIX, Tags.of(TaskExecutionObservation.TaskKeyValues.TASK_EXIT_CODE.asString(), "1")); MeterRegistryAssert.assertThat(this.simpleMeterRegistry) .hasTimerWithNameAndTags(PREFIX, Tags.of(TaskExecutionObservation.TaskKeyValues.TASK_STATUS.asString(), TaskObservations.STATUS_FAILURE)); verifyLongTaskTimerAfterStop(longTaskTimer, "myTask72", "123"); } @Test public void taskWithCloudKeyValues() { final String APPLICATION_ID = "123"; final String APPLICATION_NAME = "APP123"; final String SPACE_ID = "123"; final String SPACE_NAME = "SPACE123"; final String APPLICATION_VERSION = "APPV123"; final String INSTANCE_INDEX = "55"; final String ORGANIZATION_NAME = "ORG123"; TaskObservationCloudKeyValues taskObservationCloudKeyValues = new TaskObservationCloudKeyValues(); taskObservationCloudKeyValues.setApplicationId(APPLICATION_ID); taskObservationCloudKeyValues.setApplicationName(APPLICATION_NAME); taskObservationCloudKeyValues.setSpaceId(SPACE_ID); taskObservationCloudKeyValues.setSpaceName(SPACE_NAME); taskObservationCloudKeyValues.setApplicationVersion(APPLICATION_VERSION); taskObservationCloudKeyValues.setInstanceIndex(INSTANCE_INDEX); taskObservationCloudKeyValues.setOrganizationName(ORGANIZATION_NAME); this.taskObservations = new TaskObservations(this.observationRegistry, 
taskObservationCloudKeyValues, null); TaskExecution taskExecution = startupObservationForBasicTests("myTask72", 123L); LongTaskTimer longTaskTimer = initializeBasicTest("myTask72", "123"); // Finish Task taskObservations.onTaskEnd(taskExecution); verifyDefaultKeyValues(); MeterRegistryAssert.assertThat(this.simpleMeterRegistry) .hasTimerWithNameAndTags(PREFIX, Tags.of(TaskExecutionObservation.TaskKeyValues.TASK_CF_ORG_NAME.asString(), ORGANIZATION_NAME)); MeterRegistryAssert.assertThat(this.simpleMeterRegistry) .hasTimerWithNameAndTags(PREFIX, Tags.of(TaskExecutionObservation.TaskKeyValues.TASK_CF_SPACE_ID.asString(), SPACE_ID)); MeterRegistryAssert.assertThat(this.simpleMeterRegistry) .hasTimerWithNameAndTags(PREFIX, Tags.of(TaskExecutionObservation.TaskKeyValues.TASK_CF_SPACE_NAME.asString(), SPACE_NAME)); MeterRegistryAssert.assertThat(this.simpleMeterRegistry) .hasTimerWithNameAndTags(PREFIX, Tags.of(TaskExecutionObservation.TaskKeyValues.TASK_CF_APP_NAME.asString(), APPLICATION_NAME)); MeterRegistryAssert.assertThat(this.simpleMeterRegistry) .hasTimerWithNameAndTags(PREFIX, Tags.of(TaskExecutionObservation.TaskKeyValues.TASK_CF_APP_ID.asString(), APPLICATION_ID)); MeterRegistryAssert.assertThat(this.simpleMeterRegistry) .hasTimerWithNameAndTags(PREFIX, Tags .of(TaskExecutionObservation.TaskKeyValues.TASK_CF_APP_VERSION.asString(), APPLICATION_VERSION)); MeterRegistryAssert.assertThat(this.simpleMeterRegistry) .hasTimerWithNameAndTags(PREFIX, Tags.of(TaskExecutionObservation.TaskKeyValues.TASK_CF_INSTANCE_INDEX.asString(), INSTANCE_INDEX)); // Test Timer MeterRegistryAssert.assertThat(this.simpleMeterRegistry) .hasTimerWithNameAndTags(PREFIX, Tags.of(TaskExecutionObservation.TaskKeyValues.TASK_NAME.asString(), "myTask72")); verifyLongTaskTimerAfterStop(longTaskTimer, "myTask72", "123"); } @Test public void testCloudVariablesUninitialized() { ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() 
.withConfiguration(AutoConfigurations.of(CloudConfigurationForDefaultValues.class)); applicationContextRunner.run((context) -> { TaskObservationCloudKeyValues taskObservationCloudKeyValues = context .getBean(TaskObservationCloudKeyValues.class); assertThat(taskObservationCloudKeyValues).as("taskObservationCloudKeyValues should not be null") .isNotNull(); this.taskObservations = new TaskObservations(this.observationRegistry, taskObservationCloudKeyValues, null); TaskExecution taskExecution = startupObservationForBasicTests("myTask72", 123L); LongTaskTimer longTaskTimer = initializeBasicTest("myTask72", "123"); // Finish Task taskObservations.onTaskEnd(taskExecution); verifyDefaultKeyValues(); MeterRegistryAssert.assertThat(this.simpleMeterRegistry) .hasTimerWithNameAndTags(PREFIX, Tags.of(TaskExecutionObservation.TaskKeyValues.TASK_CF_ORG_NAME.asString(), "default")); MeterRegistryAssert.assertThat(this.simpleMeterRegistry) .hasTimerWithNameAndTags(PREFIX, Tags.of(TaskExecutionObservation.TaskKeyValues.TASK_CF_SPACE_ID.asString(), UNKNOWN)); MeterRegistryAssert.assertThat(this.simpleMeterRegistry) .hasTimerWithNameAndTags(PREFIX, Tags.of(TaskExecutionObservation.TaskKeyValues.TASK_CF_SPACE_NAME.asString(), UNKNOWN)); MeterRegistryAssert.assertThat(this.simpleMeterRegistry) .hasTimerWithNameAndTags(PREFIX, Tags.of(TaskExecutionObservation.TaskKeyValues.TASK_CF_APP_NAME.asString(), UNKNOWN)); MeterRegistryAssert.assertThat(this.simpleMeterRegistry) .hasTimerWithNameAndTags(PREFIX, Tags.of(TaskExecutionObservation.TaskKeyValues.TASK_CF_APP_ID.asString(), UNKNOWN)); MeterRegistryAssert.assertThat(this.simpleMeterRegistry) .hasTimerWithNameAndTags(PREFIX, Tags.of(TaskExecutionObservation.TaskKeyValues.TASK_CF_APP_VERSION.asString(), UNKNOWN)); MeterRegistryAssert.assertThat(this.simpleMeterRegistry) .hasTimerWithNameAndTags(PREFIX, Tags.of(TaskExecutionObservation.TaskKeyValues.TASK_CF_INSTANCE_INDEX.asString(), "0")); // Test Timer 
MeterRegistryAssert.assertThat(this.simpleMeterRegistry) .hasTimerWithNameAndTags(PREFIX, Tags.of(TaskExecutionObservation.TaskKeyValues.TASK_NAME.asString(), "myTask72")); verifyLongTaskTimerAfterStop(longTaskTimer, "myTask72", "123"); }); } private TaskExecution startupObservationForBasicTests(String taskName, long taskExecutionId) { TaskExecution taskExecution = new TaskExecution(taskExecutionId, 0, taskName, LocalDateTime.now(), LocalDateTime.now(), null, new ArrayList<>(), null, "-1", -1L); // Start Task taskObservations.onTaskStartup(taskExecution); return taskExecution; } private LongTaskTimer initializeBasicTest(String taskName, String executionId) { // Test Long Task Timer while the task is running. LongTaskTimer longTaskTimer = simpleMeterRegistry .find(TaskExecutionObservation.TASK_ACTIVE.getPrefix() + ".active") .longTaskTimer(); System.out.println(simpleMeterRegistry.getMetersAsString()); assertThat(longTaskTimer).withFailMessage("LongTask timer should be created on Task start").isNotNull(); assertThat(longTaskTimer.activeTasks()).isEqualTo(1); assertThat(longTaskTimer.getId().getTag(TaskExecutionObservation.TaskKeyValues.TASK_NAME.asString())) .isEqualTo(taskName); assertThat(longTaskTimer.getId().getTag(TaskExecutionObservation.TaskKeyValues.TASK_EXECUTION_ID.asString())) .isEqualTo(executionId); return longTaskTimer; } private void verifyDefaultKeyValues() { // Test Timer MeterRegistryAssert.assertThat(this.simpleMeterRegistry) .hasTimerWithNameAndTags(PREFIX, Tags.of(TaskExecutionObservation.TaskKeyValues.TASK_NAME.asString(), "myTask72")); MeterRegistryAssert.assertThat(this.simpleMeterRegistry) .hasTimerWithNameAndTags(PREFIX, Tags.of(TaskExecutionObservation.TaskKeyValues.TASK_EXECUTION_ID.asString(), "123")); MeterRegistryAssert.assertThat(this.simpleMeterRegistry) .hasTimerWithNameAndTags(PREFIX, Tags.of(TaskExecutionObservation.TaskKeyValues.TASK_PARENT_EXECUTION_ID.asString(), "-1")); MeterRegistryAssert.assertThat(this.simpleMeterRegistry) 
.hasTimerWithNameAndTags(PREFIX, Tags.of(TaskExecutionObservation.TaskKeyValues.TASK_EXIT_CODE.asString(), "0")); MeterRegistryAssert.assertThat(this.simpleMeterRegistry) .hasTimerWithNameAndTags(PREFIX, Tags.of(TaskExecutionObservation.TaskKeyValues.TASK_STATUS.asString(), TaskObservations.STATUS_SUCCESS)); } private void verifyLongTaskTimerAfterStop(LongTaskTimer longTaskTimer, String taskName, String executionId) { // Test Long Task Timer after the task has completed. assertThat(longTaskTimer.activeTasks()).isEqualTo(0); assertThat(longTaskTimer.getId().getTag(TaskExecutionObservation.TaskKeyValues.TASK_NAME.asString())) .isEqualTo(taskName); assertThat(longTaskTimer.getId().getTag(TaskExecutionObservation.TaskKeyValues.TASK_EXECUTION_ID.asString())) .isEqualTo(executionId); } @Configuration static class CloudConfigurationForDefaultValues { @Bean public TaskObservationCloudKeyValues taskObservationCloudKeyValues() { return new TaskObservationCloudKeyValues(); } } } ================================================ FILE: spring-cloud-task-core/src/test/java/org/springframework/cloud/task/repository/H2TaskRepositoryIntegrationTests.java ================================================ /* * Copyright 2022-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.springframework.cloud.task.repository; import java.util.UUID; import javax.sql.DataSource; import org.h2.engine.Mode.ModeEnum; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.EnumSource; import org.springframework.boot.autoconfigure.ImportAutoConfiguration; import org.springframework.boot.test.context.runner.ApplicationContextRunner; import org.springframework.cloud.task.configuration.EnableTask; import org.springframework.cloud.task.configuration.SimpleTaskAutoConfiguration; import org.springframework.jdbc.datasource.SimpleDriverDataSource; import static org.assertj.core.api.Assertions.assertThat; /** * @author Henning Pöttker */ class H2TaskRepositoryIntegrationTests { @ParameterizedTest @EnumSource(ModeEnum.class) void testTaskRepository(ModeEnum mode) { String connectionUrl = String.format("jdbc:h2:mem:%s;DB_CLOSE_DELAY=-1;MODE=%s", UUID.randomUUID(), mode); ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() .withUserConfiguration(TestConfiguration.class) .withBean(DataSource.class, () -> new SimpleDriverDataSource(new org.h2.Driver(), connectionUrl, "sa", "")); applicationContextRunner.run((context -> { TaskExplorer taskExplorer = context.getBean(TaskExplorer.class); assertThat(taskExplorer.getTaskExecutionCount()).isOne(); })); } @EnableTask @ImportAutoConfiguration(SimpleTaskAutoConfiguration.class) static class TestConfiguration { } } ================================================ FILE: spring-cloud-task-core/src/test/java/org/springframework/cloud/task/repository/MariaDbTaskRepositoryIntegrationTests.java ================================================ /* * Copyright 2022-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.repository; import javax.sql.DataSource; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; import org.mariadb.jdbc.MariaDbDataSource; import org.testcontainers.junit.jupiter.Container; import org.testcontainers.junit.jupiter.Testcontainers; import org.testcontainers.mariadb.MariaDBContainer; import org.testcontainers.utility.DockerImageName; import org.springframework.boot.autoconfigure.ImportAutoConfiguration; import org.springframework.boot.test.context.runner.ApplicationContextRunner; import org.springframework.cloud.task.configuration.EnableTask; import org.springframework.cloud.task.configuration.SimpleTaskAutoConfiguration; import org.springframework.context.annotation.Bean; import org.springframework.core.io.ClassPathResource; import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator; import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; import static org.assertj.core.api.Assertions.assertThat; @Tag("DockerRequired") @Testcontainers @SpringJUnitConfig public class MariaDbTaskRepositoryIntegrationTests { private static final DockerImageName MARIADB_IMAGE = DockerImageName.parse("mariadb:10.9.3"); /** * Provide a mariadb test container for tests. 
*/ @Container public static MariaDBContainer mariaDBContainer = new MariaDBContainer(MARIADB_IMAGE); @Test public void testTaskExplorer() { ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() .withUserConfiguration(MariaDbTaskRepositoryIntegrationTests.TestConfiguration.class); applicationContextRunner.run((context -> { TaskExplorer taskExplorer = context.getBean(TaskExplorer.class); assertThat(taskExplorer.getTaskExecutionCount()).isOne(); })); applicationContextRunner.run((context -> { TaskExplorer taskExplorer = context.getBean(TaskExplorer.class); assertThat(taskExplorer.getTaskExecutionCount()).isEqualTo(2); })); } @EnableTask @ImportAutoConfiguration(SimpleTaskAutoConfiguration.class) static class TestConfiguration { public static boolean firstTime = true; @Bean public DataSource dataSource() throws Exception { MariaDbDataSource datasource = new MariaDbDataSource(); datasource.setUrl(mariaDBContainer.getJdbcUrl()); datasource.setUser(mariaDBContainer.getUsername()); datasource.setPassword(mariaDBContainer.getPassword()); if (firstTime) { ResourceDatabasePopulator databasePopulator = new ResourceDatabasePopulator(); databasePopulator .addScript(new ClassPathResource("/org/springframework/cloud/task/schema-mariadb.sql")); databasePopulator.execute(datasource); firstTime = false; } return datasource; } } } ================================================ FILE: spring-cloud-task-core/src/test/java/org/springframework/cloud/task/repository/dao/BaseTaskExecutionDaoTestCases.java ================================================ /* * Copyright 2018-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.repository.dao; import java.time.LocalDateTime; import java.util.List; import org.junit.jupiter.api.Test; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.test.annotation.DirtiesContext; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.fail; /** * Defines test cases that shall be between {@link TaskExecutionDao} tests. * * @author Gunnar Hillert */ public abstract class BaseTaskExecutionDaoTestCases { protected TaskExecutionDao dao; @Test @DirtiesContext public void getLatestTaskExecutionsByTaskNamesWithNullParameter() { try { this.dao.getLatestTaskExecutionsByTaskNames(null); } catch (IllegalArgumentException e) { assertThat(e.getMessage()).isEqualTo("At least 1 task name must be provided."); return; } fail("Expected an IllegalArgumentException to be thrown."); } @Test @DirtiesContext public void getLatestTaskExecutionsByTaskNamesWithEmptyArrayParameter() { try { this.dao.getLatestTaskExecutionsByTaskNames(new String[0]); } catch (IllegalArgumentException e) { assertThat(e.getMessage()).isEqualTo("At least 1 task name must be provided."); return; } fail("Expected an IllegalArgumentException to be thrown."); } @Test @DirtiesContext public void getLatestTaskExecutionsByTaskNamesWithArrayParametersContainingNullAndEmptyValues() { try { this.dao.getLatestTaskExecutionsByTaskNames("foo", null, "bar", " "); } catch (IllegalArgumentException e) { assertThat(e.getMessage()) .isEqualTo("Task names must not contain any empty elements but 
2 of 4 were empty or null."); return; } fail("Expected an IllegalArgumentException to be thrown."); } @Test @DirtiesContext public void getLatestTaskExecutionsByTaskNamesWithSingleTaskName() { initializeRepositoryNotInOrderWithMultipleTaskExecutions(); final List latestTaskExecutions = this.dao.getLatestTaskExecutionsByTaskNames("FOO1"); assertThat(latestTaskExecutions.size() == 1) .as("Expected only 1 taskExecution but got " + latestTaskExecutions.size()) .isTrue(); final TaskExecution lastTaskExecution = latestTaskExecutions.get(0); assertThat(lastTaskExecution.getTaskName()).isEqualTo("FOO1"); assertThat(lastTaskExecution.getStartTime().getYear()).isEqualTo(2015); assertThat(lastTaskExecution.getStartTime().getMonthValue()).isEqualTo(2); assertThat(lastTaskExecution.getStartTime().getDayOfMonth()).isEqualTo(22); assertThat(lastTaskExecution.getStartTime().getHour()).isEqualTo(23); assertThat(lastTaskExecution.getStartTime().getMinute()).isEqualTo(59); assertThat(lastTaskExecution.getStartTime().getSecond()).isEqualTo(0); } @Test @DirtiesContext public void getLatestTaskExecutionsByTaskNamesWithMultipleTaskNames() { initializeRepositoryNotInOrderWithMultipleTaskExecutions(); final List latestTaskExecutions = this.dao.getLatestTaskExecutionsByTaskNames("FOO1", "FOO3", "FOO4"); assertThat(latestTaskExecutions.size() == 3) .as("Expected 3 taskExecutions but got " + latestTaskExecutions.size()) .isTrue(); LocalDateTime startDateTime = latestTaskExecutions.get(0).getStartTime(); assertThat(startDateTime.getYear()).isEqualTo(2016); assertThat(startDateTime.getMonthValue()).isEqualTo(8); assertThat(startDateTime.getDayOfMonth()).isEqualTo(20); assertThat(startDateTime.getHour()).isEqualTo(14); assertThat(startDateTime.getMinute()).isEqualTo(45); assertThat(startDateTime.getSecond()).isEqualTo(0); LocalDateTime startDateTimeOne = latestTaskExecutions.get(1).getStartTime(); assertThat(startDateTimeOne.getYear()).isEqualTo(2015); 
assertThat(startDateTimeOne.getMonthValue()).isEqualTo(2); assertThat(startDateTimeOne.getDayOfMonth()).isEqualTo(22); assertThat(startDateTimeOne.getHour()).isEqualTo(23); assertThat(startDateTimeOne.getMinute()).isEqualTo(59); assertThat(startDateTimeOne.getSecond()).isEqualTo(0); LocalDateTime startDateTimeTwo = latestTaskExecutions.get(2).getStartTime(); assertThat(startDateTimeTwo.getYear()).isEqualTo(2015); assertThat(startDateTimeTwo.getMonthValue()).isEqualTo(2); assertThat(startDateTimeTwo.getDayOfMonth()).isEqualTo(20); assertThat(startDateTimeTwo.getHour()).isEqualTo(14); assertThat(startDateTimeTwo.getMinute()).isEqualTo(45); assertThat(startDateTimeTwo.getSecond()).isEqualTo(0); } /** * This test is a special use-case. While not common, it is theoretically possible, * that a task may have executed with the exact same start time multiple times. In * that case we should still only get 1 returned {@link TaskExecution}. */ @Test @DirtiesContext public void getLatestTaskExecutionsByTaskNamesWithIdenticalTaskExecutions() { long executionIdOffset = initializeRepositoryNotInOrderWithMultipleTaskExecutions(); final List latestTaskExecutions = this.dao.getLatestTaskExecutionsByTaskNames("FOO5"); assertThat(latestTaskExecutions.size() == 1) .as("Expected only 1 taskExecution but got " + latestTaskExecutions.size()) .isTrue(); LocalDateTime startDateTime = latestTaskExecutions.get(0).getStartTime(); assertThat(startDateTime.getYear()).isEqualTo(2015); assertThat(startDateTime.getMonthValue()).isEqualTo(2); assertThat(startDateTime.getDayOfMonth()).isEqualTo(22); assertThat(startDateTime.getHour()).isEqualTo(23); assertThat(startDateTime.getMinute()).isEqualTo(59); assertThat(startDateTime.getSecond()).isEqualTo(0); assertThat(latestTaskExecutions.get(0).getExecutionId()).isEqualTo(9 + executionIdOffset); } @Test @DirtiesContext public void getLatestTaskExecutionForTaskNameWithNullParameter() { try { this.dao.getLatestTaskExecutionForTaskName(null); } catch 
(IllegalArgumentException e) { assertThat(e.getMessage()).isEqualTo("The task name must not be empty."); return; } fail("Expected an IllegalArgumentException to be thrown."); } @Test @DirtiesContext public void getLatestTaskExecutionForTaskNameWithEmptyStringParameter() { try { this.dao.getLatestTaskExecutionForTaskName(""); } catch (IllegalArgumentException e) { assertThat(e.getMessage()).isEqualTo("The task name must not be empty."); return; } fail("Expected an IllegalArgumentException to be thrown."); } @Test @DirtiesContext public void getLatestTaskExecutionForNonExistingTaskName() { initializeRepositoryNotInOrderWithMultipleTaskExecutions(); final TaskExecution latestTaskExecution = this.dao.getLatestTaskExecutionForTaskName("Bar5"); assertThat(latestTaskExecution).as("Expected the latestTaskExecution to be null but got" + latestTaskExecution) .isNull(); } @Test @DirtiesContext public void getLatestTaskExecutionForExistingTaskName() { initializeRepositoryNotInOrderWithMultipleTaskExecutions(); final TaskExecution latestTaskExecution = this.dao.getLatestTaskExecutionForTaskName("FOO1"); assertThat(latestTaskExecution).as("Expected the latestTaskExecution not to be null").isNotNull(); LocalDateTime startDateTime = latestTaskExecution.getStartTime(); assertThat(startDateTime.getYear()).isEqualTo(2015); assertThat(startDateTime.getMonthValue()).isEqualTo(2); assertThat(startDateTime.getDayOfMonth()).isEqualTo(22); assertThat(startDateTime.getHour()).isEqualTo(23); assertThat(startDateTime.getMinute()).isEqualTo(59); assertThat(startDateTime.getSecond()).isEqualTo(0); } /** * This test is a special use-case. While not common, it is theoretically possible, * that a task may have executed with the exact same start time multiple times. In * that case we should still only get 1 returned {@link TaskExecution}. 
*/ @Test @DirtiesContext public void getLatestTaskExecutionForTaskNameWithIdenticalTaskExecutions() { long executionIdOffset = initializeRepositoryNotInOrderWithMultipleTaskExecutions(); final TaskExecution latestTaskExecution = this.dao.getLatestTaskExecutionForTaskName("FOO5"); assertThat(latestTaskExecution).as("Expected the latestTaskExecution not to be null").isNotNull(); LocalDateTime startDateTime = latestTaskExecution.getStartTime(); assertThat(startDateTime.getYear()).isEqualTo(2015); assertThat(startDateTime.getMonthValue()).isEqualTo(2); assertThat(startDateTime.getDayOfMonth()).isEqualTo(22); assertThat(startDateTime.getHour()).isEqualTo(23); assertThat(startDateTime.getMinute()).isEqualTo(59); assertThat(startDateTime.getSecond()).isEqualTo(0); assertThat(latestTaskExecution.getExecutionId()).isEqualTo(9 + executionIdOffset); } @Test @DirtiesContext public void getRunningTaskExecutions() { initializeRepositoryNotInOrderWithMultipleTaskExecutions(); assertThat(this.dao.getRunningTaskExecutionCount()).isEqualTo(this.dao.getTaskExecutionCount()); this.dao.completeTaskExecution(1, 0, LocalDateTime.now(), "c'est fini!"); assertThat(this.dao.getRunningTaskExecutionCount()).isEqualTo(this.dao.getTaskExecutionCount() - 1); } protected long initializeRepositoryNotInOrderWithMultipleTaskExecutions() { final TaskExecution foo1_0 = getTaskExecution("FOO1", "externalC"); foo1_0.setStartTime(getDate(2015, 2, 22, 23, 59)); final TaskExecution foo1_1 = getTaskExecution("FOO1", "externalC"); foo1_1.setStartTime(getDate(2015, 2, 20, 14, 45)); final TaskExecution foo1_2 = getTaskExecution("FOO1", "externalC"); foo1_2.setStartTime(getDate(2015, 1, 19, 14, 30)); final TaskExecution foo1_3 = getTaskExecution("FOO1", "externalC"); foo1_3.setStartTime(getDate(2015, 1, 20, 14, 45)); TaskExecution foo2 = getTaskExecution("FOO2", "externalA"); foo2.setStartTime(getDate(2015, 4, 20, 14, 45)); TaskExecution foo3 = getTaskExecution("FOO3", "externalB"); 
foo3.setStartTime(getDate(2016, 8, 20, 14, 45)); TaskExecution foo4 = getTaskExecution("FOO4", "externalB"); foo4.setStartTime(getDate(2015, 2, 20, 14, 45)); final TaskExecution foo5_0 = getTaskExecution("FOO5", "externalC"); foo5_0.setStartTime(getDate(2015, 2, 22, 23, 59)); final TaskExecution foo5_1 = getTaskExecution("FOO5", "externalC"); foo5_1.setStartTime(getDate(2015, 2, 22, 23, 59)); final TaskExecution foo5_2 = getTaskExecution("FOO5", "externalC"); foo5_2.setStartTime(getDate(2015, 2, 22, 23, 59)); long executionIdOffset = this.createTaskExecution(foo1_0); this.createTaskExecution(foo1_1); this.createTaskExecution(foo1_2); this.createTaskExecution(foo1_3); this.createTaskExecution(foo2); this.createTaskExecution(foo3); this.createTaskExecution(foo4); this.createTaskExecution(foo5_0); this.createTaskExecution(foo5_1); this.createTaskExecution(foo5_2); return executionIdOffset; } private LocalDateTime getDate(int year, int month, int day, int hour, int minute) { return LocalDateTime.now() .withYear(year) .withMonth(month) .withDayOfMonth(day) .withHour(hour) .withMinute(minute) .withSecond(0); } private long createTaskExecution(TaskExecution te) { return this.dao .createTaskExecution(te.getTaskName(), te.getStartTime(), te.getArguments(), te.getExternalExecutionId()) .getExecutionId(); } protected TaskExecution getTaskExecution(String taskName, String externalExecutionId) { TaskExecution taskExecution = new TaskExecution(); taskExecution.setTaskName(taskName); taskExecution.setExternalExecutionId(externalExecutionId); taskExecution.setStartTime(LocalDateTime.now()); return taskExecution; } } ================================================ FILE: spring-cloud-task-core/src/test/java/org/springframework/cloud/task/repository/dao/JdbcTaskExecutionDaoMariaDBIntegrationTests.java ================================================ /* * Copyright 2022-present the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.repository.dao; import java.time.LocalDateTime; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.UUID; import javax.sql.DataSource; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mariadb.jdbc.MariaDbDataSource; import org.testcontainers.junit.jupiter.Container; import org.testcontainers.junit.jupiter.Testcontainers; import org.testcontainers.mariadb.MariaDBContainer; import org.testcontainers.utility.DockerImageName; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.autoconfigure.context.PropertyPlaceholderAutoConfiguration; import org.springframework.cloud.task.configuration.TestConfiguration; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.cloud.task.repository.TaskRepository; import org.springframework.cloud.task.util.TestDBUtils; import org.springframework.cloud.task.util.TestVerifierUtils; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.core.io.ClassPathResource; import org.springframework.data.domain.Page; import org.springframework.data.domain.PageRequest; import org.springframework.data.domain.Pageable; import 
org.springframework.data.domain.Sort; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator; import org.springframework.test.annotation.DirtiesContext; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit.jupiter.SpringExtension; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatExceptionOfType; import static org.assertj.core.api.Assertions.assertThatThrownBy; /** * Executes unit integration tests on JdbcTaskExecutionDao for MARIADB. * * @author Glenn Renfro */ @Tag("DockerRequired") @Testcontainers @ExtendWith(SpringExtension.class) @ContextConfiguration(classes = { TestConfiguration.class, JdbcTaskExecutionDaoMariaDBIntegrationTests.TestDataSourceConfiguration.class, PropertyPlaceholderAutoConfiguration.class }) public class JdbcTaskExecutionDaoMariaDBIntegrationTests extends BaseTaskExecutionDaoTestCases { private static final DockerImageName MARIADB_IMAGE = DockerImageName.parse("mariadb:10.9.3"); /** * Provide mariadb test container for tests. 
*/ @Container public static MariaDBContainer mariaDBContainer = new MariaDBContainer(MARIADB_IMAGE); @Autowired TaskRepository repository; @Autowired private DataSource dataSource; @BeforeEach public void setup() { final JdbcTaskExecutionDao dao = new JdbcTaskExecutionDao(this.dataSource); dao.setTaskIncrementer(TestDBUtils.getIncrementer(this.dataSource)); JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource); jdbcTemplate.execute("TRUNCATE TABLE TASK_EXECUTION_PARAMS"); jdbcTemplate.execute("DELETE FROM TASK_EXECUTION"); jdbcTemplate.execute("ALTER SEQUENCE TASK_SEQ RESTART;"); super.dao = dao; } @Test @DirtiesContext public void testStartTaskExecution() { TaskExecution expectedTaskExecution = this.dao.createTaskExecution(null, null, new ArrayList<>(0), null); expectedTaskExecution.setArguments(Collections.singletonList("foo=" + UUID.randomUUID().toString())); expectedTaskExecution.setStartTime(LocalDateTime.now()); expectedTaskExecution.setTaskName(UUID.randomUUID().toString()); this.dao.startTaskExecution(expectedTaskExecution.getExecutionId(), expectedTaskExecution.getTaskName(), expectedTaskExecution.getStartTime(), expectedTaskExecution.getArguments(), expectedTaskExecution.getExternalExecutionId()); TestVerifierUtils.verifyTaskExecution(expectedTaskExecution, TestDBUtils.getTaskExecutionFromDB(this.dataSource, expectedTaskExecution.getExecutionId())); } @Test @DirtiesContext public void createTaskExecution() { TaskExecution expectedTaskExecution = TestVerifierUtils.createSampleTaskExecutionNoArg(); expectedTaskExecution = this.dao.createTaskExecution(expectedTaskExecution.getTaskName(), expectedTaskExecution.getStartTime(), expectedTaskExecution.getArguments(), expectedTaskExecution.getExternalExecutionId()); TestVerifierUtils.verifyTaskExecution(expectedTaskExecution, TestDBUtils.getTaskExecutionFromDB(this.dataSource, expectedTaskExecution.getExecutionId())); } @Test @DirtiesContext public void createEmptyTaskExecution() { TaskExecution 
expectedTaskExecution = this.dao.createTaskExecution(null, null, new ArrayList<>(0), null); TestVerifierUtils.verifyTaskExecution(expectedTaskExecution, TestDBUtils.getTaskExecutionFromDB(this.dataSource, expectedTaskExecution.getExecutionId())); } @Test @DirtiesContext public void completeTaskExecution() { TaskExecution expectedTaskExecution = TestVerifierUtils.endSampleTaskExecutionNoArg(); expectedTaskExecution = this.dao.createTaskExecution(expectedTaskExecution.getTaskName(), expectedTaskExecution.getStartTime(), expectedTaskExecution.getArguments(), expectedTaskExecution.getExternalExecutionId()); this.dao.completeTaskExecution(expectedTaskExecution.getExecutionId(), expectedTaskExecution.getExitCode(), expectedTaskExecution.getEndTime(), expectedTaskExecution.getExitMessage()); TestVerifierUtils.verifyTaskExecution(expectedTaskExecution, TestDBUtils.getTaskExecutionFromDB(this.dataSource, expectedTaskExecution.getExecutionId())); } @Test @DirtiesContext public void completeTaskExecutionWithNoCreate() { JdbcTaskExecutionDao dao = new JdbcTaskExecutionDao(this.dataSource); TaskExecution expectedTaskExecution = TestVerifierUtils.endSampleTaskExecutionNoArg(); assertThatExceptionOfType(IllegalStateException.class).isThrownBy(() -> { dao.completeTaskExecution(expectedTaskExecution.getExecutionId(), expectedTaskExecution.getExitCode(), expectedTaskExecution.getEndTime(), expectedTaskExecution.getExitMessage()); }); } @Test @DirtiesContext public void testFindAllPageableSort() { initializeRepositoryNotInOrder(); Sort sort = Sort.by(new Sort.Order(Sort.Direction.ASC, "EXTERNAL_EXECUTION_ID")); Iterator iter = getPageIterator(0, 2, sort); TaskExecution taskExecution = iter.next(); assertThat(taskExecution.getTaskName()).isEqualTo("FOO2"); taskExecution = iter.next(); assertThat(taskExecution.getTaskName()).isEqualTo("FOO3"); iter = getPageIterator(1, 2, sort); taskExecution = iter.next(); assertThat(taskExecution.getTaskName()).isEqualTo("FOO1"); } @Test 
@DirtiesContext public void testFindAllDefaultSort() { initializeRepository(); Iterator iter = getPageIterator(0, 2, null); TaskExecution taskExecution = iter.next(); assertThat(taskExecution.getTaskName()).isEqualTo("FOO1"); taskExecution = iter.next(); assertThat(taskExecution.getTaskName()).isEqualTo("FOO2"); iter = getPageIterator(1, 2, null); taskExecution = iter.next(); assertThat(taskExecution.getTaskName()).isEqualTo("FOO3"); } @Test @DirtiesContext public void testStartExecutionWithNullExternalExecutionIdExisting() { TaskExecution expectedTaskExecution = initializeTaskExecutionWithExternalExecutionId(); this.dao.startTaskExecution(expectedTaskExecution.getExecutionId(), expectedTaskExecution.getTaskName(), expectedTaskExecution.getStartTime(), expectedTaskExecution.getArguments(), null); TestVerifierUtils.verifyTaskExecution(expectedTaskExecution, TestDBUtils.getTaskExecutionFromDB(this.dataSource, expectedTaskExecution.getExecutionId())); } @Test @DirtiesContext public void testStartExecutionWithNullExternalExecutionIdNonExisting() { TaskExecution expectedTaskExecution = initializeTaskExecutionWithExternalExecutionId(); this.dao.startTaskExecution(expectedTaskExecution.getExecutionId(), expectedTaskExecution.getTaskName(), expectedTaskExecution.getStartTime(), expectedTaskExecution.getArguments(), "BAR"); expectedTaskExecution.setExternalExecutionId("BAR"); TestVerifierUtils.verifyTaskExecution(expectedTaskExecution, TestDBUtils.getTaskExecutionFromDB(this.dataSource, expectedTaskExecution.getExecutionId())); } @Test @DirtiesContext public void testFindRunningTaskExecutions() { initializeRepositoryNotInOrderWithMultipleTaskExecutions(); assertThat( this.dao.findRunningTaskExecutions("FOO1", PageRequest.of(1, Integer.MAX_VALUE, Sort.by("START_TIME"))) .getTotalElements()) .isEqualTo(4); } @Test @DirtiesContext public void testFindRunningTaskExecutionsIllegalSort() { initializeRepositoryNotInOrderWithMultipleTaskExecutions(); assertThatThrownBy(() -> 
this.dao .findRunningTaskExecutions("FOO1", PageRequest.of(1, Integer.MAX_VALUE, Sort.by("ILLEGAL_SORT"))) .getTotalElements()).isInstanceOf(IllegalArgumentException.class) .hasMessage("Invalid sort option selected: ILLEGAL_SORT"); } @Test @DirtiesContext public void testFindRunningTaskExecutionsSortWithDifferentCase() { initializeRepositoryNotInOrderWithMultipleTaskExecutions(); assertThat( this.dao.findRunningTaskExecutions("FOO1", PageRequest.of(1, Integer.MAX_VALUE, Sort.by("StArT_TiMe"))) .getTotalElements()) .isEqualTo(4); } private TaskExecution initializeTaskExecutionWithExternalExecutionId() { TaskExecution expectedTaskExecution = TestVerifierUtils.createSampleTaskExecutionNoArg(); return this.dao.createTaskExecution(expectedTaskExecution.getTaskName(), expectedTaskExecution.getStartTime(), expectedTaskExecution.getArguments(), "FOO1"); } private Iterator getPageIterator(int pageNum, int pageSize, Sort sort) { Pageable pageable = (sort == null) ? PageRequest.of(pageNum, pageSize) : PageRequest.of(pageNum, pageSize, sort); Page page = this.dao.findAll(pageable); assertThat(page.getTotalElements()).isEqualTo(3); assertThat(page.getTotalPages()).isEqualTo(2); return page.iterator(); } private void initializeRepository() { this.repository.createTaskExecution(getTaskExecution("FOO3", "externalA")); this.repository.createTaskExecution(getTaskExecution("FOO2", "externalB")); this.repository.createTaskExecution(getTaskExecution("FOO1", "externalC")); } private void initializeRepositoryNotInOrder() { this.repository.createTaskExecution(getTaskExecution("FOO1", "externalC")); this.repository.createTaskExecution(getTaskExecution("FOO2", "externalA")); this.repository.createTaskExecution(getTaskExecution("FOO3", "externalB")); } @Configuration static class TestDataSourceConfiguration { public static boolean firstTime = true; @Bean public DataSource dataSource() throws Exception { MariaDbDataSource datasource = new MariaDbDataSource(); 
datasource.setUrl(mariaDBContainer.getJdbcUrl()); datasource.setUser(mariaDBContainer.getUsername()); datasource.setPassword(mariaDBContainer.getPassword()); if (firstTime) { ResourceDatabasePopulator databasePopulator = new ResourceDatabasePopulator(); databasePopulator .addScript(new ClassPathResource("/org/springframework/cloud/task/schema-mariadb.sql")); databasePopulator.execute(datasource); firstTime = false; } return datasource; } } } ================================================ FILE: spring-cloud-task-core/src/test/java/org/springframework/cloud/task/repository/dao/TaskExecutionDaoTests.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.springframework.cloud.task.repository.dao; import java.time.LocalDateTime; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.Map; import java.util.UUID; import javax.sql.DataSource; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.ValueSource; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.autoconfigure.context.PropertyPlaceholderAutoConfiguration; import org.springframework.boot.jdbc.autoconfigure.EmbeddedDataSourceConfiguration; import org.springframework.cloud.task.configuration.TestConfiguration; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.cloud.task.repository.TaskRepository; import org.springframework.cloud.task.util.TestDBUtils; import org.springframework.cloud.task.util.TestVerifierUtils; import org.springframework.data.domain.Page; import org.springframework.data.domain.PageRequest; import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Sort; import org.springframework.test.annotation.DirtiesContext; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit.jupiter.SpringExtension; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatExceptionOfType; import static org.assertj.core.api.Assertions.assertThatThrownBy; /** * Executes unit tests on JdbcTaskExecutionDao. 
* * @author Glenn Renfro * @author Gunnar Hillert * @author Michael Minella */ @ExtendWith(SpringExtension.class) @ContextConfiguration(classes = { TestConfiguration.class, EmbeddedDataSourceConfiguration.class, PropertyPlaceholderAutoConfiguration.class }) public class TaskExecutionDaoTests extends BaseTaskExecutionDaoTestCases { @Autowired TaskRepository repository; @Autowired private DataSource dataSource; @BeforeEach public void setup() { final JdbcTaskExecutionDao dao = new JdbcTaskExecutionDao(this.dataSource); dao.setTaskIncrementer(TestDBUtils.getIncrementer(this.dataSource)); super.dao = dao; } @ParameterizedTest @DirtiesContext @ValueSource(strings = { "db", "map" }) public void testStartTaskExecutionGeneric(String testType) { getDao(testType); TaskExecution expectedTaskExecution = this.dao.createTaskExecution(null, null, new ArrayList<>(0), null); expectedTaskExecution.setArguments(Collections.singletonList("foo=" + UUID.randomUUID().toString())); expectedTaskExecution.setStartTime(LocalDateTime.now()); expectedTaskExecution.setTaskName(UUID.randomUUID().toString()); this.dao.startTaskExecution(expectedTaskExecution.getExecutionId(), expectedTaskExecution.getTaskName(), expectedTaskExecution.getStartTime(), expectedTaskExecution.getArguments(), expectedTaskExecution.getExternalExecutionId()); TestVerifierUtils.verifyTaskExecution(expectedTaskExecution, getTaskExecution(testType, expectedTaskExecution)); } private TaskExecutionDao getDao(String type) { if (type.equals("db")) { final JdbcTaskExecutionDao jdbcDao = new JdbcTaskExecutionDao(this.dataSource); jdbcDao.setTaskIncrementer(TestDBUtils.getIncrementer(this.dataSource)); this.dao = jdbcDao; } else { this.dao = new MapTaskExecutionDao(); } return this.dao; } private TaskExecution getTaskExecution(String type, TaskExecution expectedTaskExecution) { TaskExecution taskExecution; if (type.equals("db")) { taskExecution = TestDBUtils.getTaskExecutionFromDB(this.dataSource, 
expectedTaskExecution.getExecutionId()); } else { Map taskExecutionMap = ((MapTaskExecutionDao) dao).getTaskExecutions(); taskExecution = taskExecutionMap.get(expectedTaskExecution.getExecutionId()); } return taskExecution; } @ParameterizedTest @DirtiesContext @ValueSource(strings = { "db", "map" }) public void createTaskExecution(String testType) { getDao(testType); TaskExecution expectedTaskExecution = TestVerifierUtils.createSampleTaskExecutionNoArg(); expectedTaskExecution = this.dao.createTaskExecution(expectedTaskExecution.getTaskName(), expectedTaskExecution.getStartTime(), expectedTaskExecution.getArguments(), expectedTaskExecution.getExternalExecutionId()); TestVerifierUtils.verifyTaskExecution(expectedTaskExecution, getTaskExecution(testType, expectedTaskExecution)); } @ParameterizedTest @DirtiesContext @ValueSource(strings = { "db", "map" }) public void createEmptyTaskExecution(String testType) { getDao(testType); TaskExecution expectedTaskExecution = this.dao.createTaskExecution(null, null, new ArrayList<>(0), null); TestVerifierUtils.verifyTaskExecution(expectedTaskExecution, getTaskExecution(testType, expectedTaskExecution)); } @ParameterizedTest @DirtiesContext @ValueSource(strings = { "db", "map" }) public void completeTaskExecution(String testType) { getDao(testType); TaskExecution expectedTaskExecution = TestVerifierUtils.endSampleTaskExecutionNoArg(); expectedTaskExecution = this.dao.createTaskExecution(expectedTaskExecution.getTaskName(), expectedTaskExecution.getStartTime(), expectedTaskExecution.getArguments(), expectedTaskExecution.getExternalExecutionId()); this.dao.completeTaskExecution(expectedTaskExecution.getExecutionId(), expectedTaskExecution.getExitCode(), expectedTaskExecution.getEndTime(), expectedTaskExecution.getExitMessage()); TestVerifierUtils.verifyTaskExecution(expectedTaskExecution, getTaskExecution(testType, expectedTaskExecution)); } @ParameterizedTest @DirtiesContext @ValueSource(strings = { "db", "map" }) public void 
completeTaskExecutionWithNoCreate(String testType) { getDao(testType); JdbcTaskExecutionDao dao = new JdbcTaskExecutionDao(this.dataSource); TaskExecution expectedTaskExecution = TestVerifierUtils.endSampleTaskExecutionNoArg(); assertThatExceptionOfType(IllegalStateException.class).isThrownBy(() -> { dao.completeTaskExecution(expectedTaskExecution.getExecutionId(), expectedTaskExecution.getExitCode(), expectedTaskExecution.getEndTime(), expectedTaskExecution.getExitMessage()); }); } @Test @DirtiesContext public void testFindAllPageableSort() { initializeRepositoryNotInOrder(); Sort sort = Sort.by(new Sort.Order(Sort.Direction.ASC, "EXTERNAL_EXECUTION_ID")); Iterator iter = getPageIterator(0, 2, sort); TaskExecution taskExecution = iter.next(); assertThat(taskExecution.getTaskName()).isEqualTo("FOO2"); taskExecution = iter.next(); assertThat(taskExecution.getTaskName()).isEqualTo("FOO3"); iter = getPageIterator(1, 2, sort); taskExecution = iter.next(); assertThat(taskExecution.getTaskName()).isEqualTo("FOO1"); } @Test @DirtiesContext public void testFindAllDefaultSort() { initializeRepository(); Iterator iter = getPageIterator(0, 2, null); TaskExecution taskExecution = iter.next(); assertThat(taskExecution.getTaskName()).isEqualTo("FOO1"); taskExecution = iter.next(); assertThat(taskExecution.getTaskName()).isEqualTo("FOO2"); iter = getPageIterator(1, 2, null); taskExecution = iter.next(); assertThat(taskExecution.getTaskName()).isEqualTo("FOO3"); } @ParameterizedTest @DirtiesContext @ValueSource(strings = { "db", "map" }) public void testStartExecutionWithNullExternalExecutionIdExisting(String testType) { getDao(testType); TaskExecution expectedTaskExecution = initializeTaskExecutionWithExternalExecutionId(); this.dao.startTaskExecution(expectedTaskExecution.getExecutionId(), expectedTaskExecution.getTaskName(), expectedTaskExecution.getStartTime(), expectedTaskExecution.getArguments(), null); TestVerifierUtils.verifyTaskExecution(expectedTaskExecution, 
getTaskExecution(testType, expectedTaskExecution)); } @ParameterizedTest @DirtiesContext @ValueSource(strings = { "db", "map" }) public void testStartExecutionWithNullExternalExecutionIdNonExisting(String testType) { getDao(testType); TaskExecution expectedTaskExecution = initializeTaskExecutionWithExternalExecutionId(); this.dao.startTaskExecution(expectedTaskExecution.getExecutionId(), expectedTaskExecution.getTaskName(), expectedTaskExecution.getStartTime(), expectedTaskExecution.getArguments(), "BAR"); expectedTaskExecution.setExternalExecutionId("BAR"); TestVerifierUtils.verifyTaskExecution(expectedTaskExecution, getTaskExecution(testType, expectedTaskExecution)); } @ParameterizedTest @DirtiesContext @ValueSource(strings = { "db", "map" }) public void testFindRunningTaskExecutions(String testType) { getDao(testType); initializeRepositoryNotInOrderWithMultipleTaskExecutions(); assertThat(this.dao.findRunningTaskExecutions("FOO1", PageRequest.of(1, 4, Sort.by("START_TIME"))) .getTotalElements()).isEqualTo(4); } @Test @DirtiesContext public void testFindRunningTaskExecutionsIllegalSort() { initializeRepositoryNotInOrderWithMultipleTaskExecutions(); assertThatThrownBy(() -> this.dao .findRunningTaskExecutions("FOO1", PageRequest.of(1, Integer.MAX_VALUE, Sort.by("ILLEGAL_SORT"))) .getTotalElements()).isInstanceOf(IllegalArgumentException.class) .hasMessage("Invalid sort option selected: ILLEGAL_SORT"); } @Test @DirtiesContext public void testFindRunningTaskExecutionsSortWithDifferentCase() { initializeRepositoryNotInOrderWithMultipleTaskExecutions(); assertThat( this.dao.findRunningTaskExecutions("FOO1", PageRequest.of(1, Integer.MAX_VALUE, Sort.by("StArT_TiMe"))) .getTotalElements()) .isEqualTo(4); } private TaskExecution initializeTaskExecutionWithExternalExecutionId() { TaskExecution expectedTaskExecution = TestVerifierUtils.createSampleTaskExecutionNoArg(); return this.dao.createTaskExecution(expectedTaskExecution.getTaskName(), 
expectedTaskExecution.getStartTime(), expectedTaskExecution.getArguments(), "FOO1"); } private Iterator getPageIterator(int pageNum, int pageSize, Sort sort) { Pageable pageable = (sort == null) ? PageRequest.of(pageNum, pageSize) : PageRequest.of(pageNum, pageSize, sort); Page page = this.dao.findAll(pageable); assertThat(page.getTotalElements()).isEqualTo(3); assertThat(page.getTotalPages()).isEqualTo(2); return page.iterator(); } private void initializeRepository() { this.repository.createTaskExecution(getTaskExecution("FOO3", "externalA")); this.repository.createTaskExecution(getTaskExecution("FOO2", "externalB")); this.repository.createTaskExecution(getTaskExecution("FOO1", "externalC")); } private void initializeRepositoryNotInOrder() { this.repository.createTaskExecution(getTaskExecution("FOO1", "externalC")); this.repository.createTaskExecution(getTaskExecution("FOO2", "externalA")); this.repository.createTaskExecution(getTaskExecution("FOO3", "externalB")); } } ================================================ FILE: spring-cloud-task-core/src/test/java/org/springframework/cloud/task/repository/database/support/FindAllPagingQueryProviderTests.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.springframework.cloud.task.repository.database.support; import java.util.Arrays; import java.util.Collection; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.MethodSource; import org.springframework.cloud.task.util.TestDBUtils; import org.springframework.data.domain.PageRequest; import org.springframework.data.domain.Pageable; import static org.assertj.core.api.Assertions.assertThat; /** * @author Glenn Renfro * @author Ryan DCruz */ public class FindAllPagingQueryProviderTests { private Pageable pageable = PageRequest.of(0, 10); public static Collection data() { return Arrays.asList(new Object[][] { { "Oracle", "SELECT TASK_EXECUTION_ID, START_TIME, END_TIME, TASK_NAME, " + "EXIT_CODE, EXIT_MESSAGE, ERROR_MESSAGE, LAST_UPDATED, EXTERNAL_EXECUTION_ID, PARENT_EXECUTION_ID FROM " + "(SELECT TASK_EXECUTION_ID, START_TIME, END_TIME, TASK_NAME, " + "EXIT_CODE, EXIT_MESSAGE, ERROR_MESSAGE, LAST_UPDATED, EXTERNAL_EXECUTION_ID, PARENT_EXECUTION_ID, ROWNUM as " + "TMP_ROW_NUM FROM (SELECT TASK_EXECUTION_ID, START_TIME, " + "END_TIME, TASK_NAME, EXIT_CODE, EXIT_MESSAGE, ERROR_MESSAGE, LAST_UPDATED, EXTERNAL_EXECUTION_ID, PARENT_EXECUTION_ID " + "FROM %PREFIX%EXECUTION ORDER BY START_TIME DESC, " + "TASK_EXECUTION_ID DESC)) WHERE TMP_ROW_NUM >= 1 AND " + "TMP_ROW_NUM < 11" }, { "HSQL Database Engine", "SELECT LIMIT 0 10 TASK_EXECUTION_ID, " + "START_TIME, END_TIME, TASK_NAME, EXIT_CODE, EXIT_MESSAGE, " + "ERROR_MESSAGE, LAST_UPDATED, EXTERNAL_EXECUTION_ID, PARENT_EXECUTION_ID FROM %PREFIX%EXECUTION ORDER BY " + "START_TIME DESC, TASK_EXECUTION_ID DESC" }, { "PostgreSQL", "SELECT TASK_EXECUTION_ID, START_TIME, END_TIME, " + "TASK_NAME, EXIT_CODE, EXIT_MESSAGE, ERROR_MESSAGE, LAST_UPDATED, EXTERNAL_EXECUTION_ID, PARENT_EXECUTION_ID " + "FROM %PREFIX%EXECUTION ORDER BY START_TIME DESC, " + "TASK_EXECUTION_ID DESC LIMIT 10 OFFSET 0" }, { "MySQL", "SELECT TASK_EXECUTION_ID, START_TIME, END_TIME, TASK_NAME, " + "EXIT_CODE, 
EXIT_MESSAGE, ERROR_MESSAGE, LAST_UPDATED, EXTERNAL_EXECUTION_ID, PARENT_EXECUTION_ID FROM " + "%PREFIX%EXECUTION ORDER BY START_TIME DESC, " + "TASK_EXECUTION_ID DESC LIMIT 0, 10" }, { "Microsoft SQL Server", "SELECT TASK_EXECUTION_ID, START_TIME, END_TIME, " + "TASK_NAME, EXIT_CODE, EXIT_MESSAGE, ERROR_MESSAGE, LAST_UPDATED, EXTERNAL_EXECUTION_ID, PARENT_EXECUTION_ID FROM " + "(SELECT TASK_EXECUTION_ID, START_TIME, END_TIME, TASK_NAME, " + "EXIT_CODE, EXIT_MESSAGE, ERROR_MESSAGE, LAST_UPDATED, EXTERNAL_EXECUTION_ID, PARENT_EXECUTION_ID, ROW_NUMBER() " + "OVER (ORDER BY START_TIME DESC, TASK_EXECUTION_ID DESC) AS " + "TMP_ROW_NUM FROM %PREFIX%EXECUTION) TASK_EXECUTION_PAGE " + "WHERE TMP_ROW_NUM >= 1 AND TMP_ROW_NUM < 11 ORDER BY START_TIME DESC, " + "TASK_EXECUTION_ID DESC" }, { "DB2/Linux", "SELECT TASK_EXECUTION_ID, START_TIME, END_TIME, " + "TASK_NAME, EXIT_CODE, EXIT_MESSAGE, ERROR_MESSAGE, LAST_UPDATED, EXTERNAL_EXECUTION_ID, PARENT_EXECUTION_ID FROM " + "(SELECT TASK_EXECUTION_ID, START_TIME, END_TIME, TASK_NAME, " + "EXIT_CODE, EXIT_MESSAGE, ERROR_MESSAGE, LAST_UPDATED, EXTERNAL_EXECUTION_ID, PARENT_EXECUTION_ID, ROW_NUMBER() " + "OVER() as TMP_ROW_NUM FROM " + "(SELECT TASK_EXECUTION_ID, START_TIME, END_TIME, TASK_NAME, EXIT_CODE, EXIT_MESSAGE, ERROR_MESSAGE, LAST_UPDATED, " + "EXTERNAL_EXECUTION_ID, PARENT_EXECUTION_ID FROM %PREFIX%EXECUTION ORDER BY START_TIME DESC, TASK_EXECUTION_ID DESC)) " + "WHERE TMP_ROW_NUM >= 1 AND TMP_ROW_NUM < 11" } }); } @ParameterizedTest @MethodSource("data") public void testGeneratedQuery(String databaseProductName, String expectedQuery) throws Exception { String actualQuery = TestDBUtils.getPagingQueryProvider(databaseProductName).getPageQuery(this.pageable); assertThat(actualQuery) .as(String.format("the generated query for %s, was not the expected query", databaseProductName)) .isEqualTo(expectedQuery); } } ================================================ FILE: 
spring-cloud-task-core/src/test/java/org/springframework/cloud/task/repository/database/support/H2PagingQueryProviderTests.java ================================================ /* * Copyright 2022-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.repository.database.support; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.UUID; import javax.sql.DataSource; import org.h2.engine.Mode.ModeEnum; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.EnumSource; import org.springframework.batch.infrastructure.item.database.Order; import org.springframework.data.domain.PageRequest; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.jdbc.datasource.SimpleDriverDataSource; import org.springframework.jdbc.support.JdbcTransactionManager; import org.springframework.transaction.PlatformTransactionManager; import org.springframework.transaction.support.TransactionTemplate; import static org.assertj.core.api.Assertions.assertThat; /** * @author Henning Pöttker * @author Mahmoud Ben Hassine */ class H2PagingQueryProviderTests { @ParameterizedTest @EnumSource(ModeEnum.class) void testH2PagingQueryProvider(ModeEnum mode) { String connectionUrl = String.format("jdbc:h2:mem:%s;MODE=%s", UUID.randomUUID(), mode); DataSource dataSource = new SimpleDriverDataSource(new org.h2.Driver(), connectionUrl, "sa", ""); 
JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource); PlatformTransactionManager transactionManager = new JdbcTransactionManager(dataSource); TransactionTemplate transactionTemplate = new TransactionTemplate(transactionManager); transactionTemplate.executeWithoutResult(status -> { jdbcTemplate.execute("CREATE TABLE TEST_TABLE (ID BIGINT NOT NULL, STRING VARCHAR(10) NOT NULL)"); jdbcTemplate.execute("INSERT INTO TEST_TABLE (ID, STRING) VALUES (1, 'Spring')"); jdbcTemplate.execute("INSERT INTO TEST_TABLE (ID, STRING) VALUES (2, 'Cloud')"); jdbcTemplate.execute("INSERT INTO TEST_TABLE (ID, STRING) VALUES (3, 'Task')"); H2PagingQueryProvider queryProvider = new H2PagingQueryProvider(); queryProvider.setSelectClause("STRING"); queryProvider.setFromClause("TEST_TABLE"); Map sortKeys = new HashMap<>(); sortKeys.put("ID", Order.ASCENDING); queryProvider.setSortKeys(sortKeys); List firstPage = jdbcTemplate.queryForList(queryProvider.getPageQuery(PageRequest.of(0, 2)), String.class); assertThat(firstPage).containsExactly("Spring", "Cloud"); List secondPage = jdbcTemplate.queryForList(queryProvider.getPageQuery(PageRequest.of(1, 2)), String.class); assertThat(secondPage).containsExactly("Task"); }); } } ================================================ FILE: spring-cloud-task-core/src/test/java/org/springframework/cloud/task/repository/database/support/InvalidPagingQueryProviderTests.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.repository.database.support; import org.junit.jupiter.api.Test; import org.springframework.cloud.task.util.TestDBUtils; import org.springframework.data.domain.PageRequest; import org.springframework.data.domain.Pageable; import static org.assertj.core.api.Assertions.assertThatExceptionOfType; /** * @author Glenn Renfro */ public class InvalidPagingQueryProviderTests { @Test public void testInvalidDatabase() throws Exception { Pageable pageable = PageRequest.of(0, 10); assertThatExceptionOfType(IllegalStateException.class).isThrownBy(() -> { TestDBUtils.getPagingQueryProvider("Invalid").getPageQuery(pageable); }); } } ================================================ FILE: spring-cloud-task-core/src/test/java/org/springframework/cloud/task/repository/database/support/SqlPagingQueryProviderFactoryBeanTests.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.springframework.cloud.task.repository.database.support; import java.util.Map; import java.util.TreeMap; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.springframework.batch.infrastructure.item.database.Order; import org.springframework.cloud.task.repository.dao.JdbcTaskExecutionDao; import org.springframework.cloud.task.repository.database.PagingQueryProvider; import org.springframework.cloud.task.util.TestDBUtils; import static org.assertj.core.api.Assertions.assertThat; /** * @author Glenn Renfro */ public class SqlPagingQueryProviderFactoryBeanTests { private SqlPagingQueryProviderFactoryBean factoryBean; @BeforeEach public void setup() throws Exception { this.factoryBean = new SqlPagingQueryProviderFactoryBean(); this.factoryBean.setDataSource(TestDBUtils.getMockDataSource("MySQL")); this.factoryBean.setDatabaseType("Oracle"); this.factoryBean.setSelectClause(JdbcTaskExecutionDao.SELECT_CLAUSE); this.factoryBean.setFromClause(JdbcTaskExecutionDao.FROM_CLAUSE); Map orderMap = new TreeMap<>(); orderMap.put("START_TIME", Order.DESCENDING); orderMap.put("TASK_EXECUTION_ID", Order.DESCENDING); this.factoryBean.setSortKeys(orderMap); } @Test public void testDatabaseType() throws Exception { PagingQueryProvider pagingQueryProvider = this.factoryBean.getObject(); assertThat(pagingQueryProvider).isInstanceOf(OraclePagingQueryProvider.class); } @Test public void testIsSingleton() { assertThat(this.factoryBean.isSingleton()).isTrue(); } } ================================================ FILE: spring-cloud-task-core/src/test/java/org/springframework/cloud/task/repository/database/support/WhereClausePagingQueryProviderTests.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.cloud.task.repository.database.support;

import java.util.Arrays;
import java.util.Collection;

import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;

import org.springframework.cloud.task.repository.database.PagingQueryProvider;
import org.springframework.cloud.task.util.TestDBUtils;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Verifies that each database-specific paging query provider correctly embeds a
 * caller-supplied WHERE clause ({@code TASK_EXECUTION_ID = '0000'}) in the generated
 * paging SQL for a fixed page request of page 0 with size 10.
 *
 * @author Glenn Renfro
 */
public class WhereClausePagingQueryProviderTests {

	// Every case pages with page 0 / size 10, so row-number based dialects are
	// expected to emit the window TMP_ROW_NUM >= 1 AND TMP_ROW_NUM < 11.
	private Pageable pageable = PageRequest.of(0, 10);

	// Parameterized data: { JDBC database product name, expected paging query }.
	// %PREFIX% is substituted with the configured task table prefix by the provider.
	public static Collection data() {
		return Arrays.asList(new Object[][] {
				// Oracle: WHERE clause applied in the innermost select, before ROWNUM.
				{ "Oracle",
						"SELECT TASK_EXECUTION_ID, START_TIME, END_TIME, TASK_NAME, "
								+ "EXIT_CODE, EXIT_MESSAGE, ERROR_MESSAGE, LAST_UPDATED, EXTERNAL_EXECUTION_ID, PARENT_EXECUTION_ID FROM "
								+ "(SELECT TASK_EXECUTION_ID, START_TIME, END_TIME, TASK_NAME, "
								+ "EXIT_CODE, EXIT_MESSAGE, ERROR_MESSAGE, LAST_UPDATED, EXTERNAL_EXECUTION_ID, PARENT_EXECUTION_ID, ROWNUM as "
								+ "TMP_ROW_NUM FROM (SELECT TASK_EXECUTION_ID, START_TIME, "
								+ "END_TIME, TASK_NAME, EXIT_CODE, EXIT_MESSAGE, ERROR_MESSAGE, "
								+ "LAST_UPDATED, EXTERNAL_EXECUTION_ID, PARENT_EXECUTION_ID FROM %PREFIX%EXECUTION "
								+ "WHERE TASK_EXECUTION_ID = '0000' ORDER BY START_TIME DESC, "
								+ "TASK_EXECUTION_ID DESC)) WHERE TMP_ROW_NUM >= 1 AND "
								+ "TMP_ROW_NUM < 11" },
				// HSQLDB: WHERE clause inserted before the ORDER BY in the LIMIT form.
				{ "HSQL Database Engine",
						"SELECT LIMIT 0 10 TASK_EXECUTION_ID, "
								+ "START_TIME, END_TIME, TASK_NAME, EXIT_CODE, EXIT_MESSAGE, "
								+ "ERROR_MESSAGE, LAST_UPDATED, EXTERNAL_EXECUTION_ID, PARENT_EXECUTION_ID FROM %PREFIX%EXECUTION "
								+ "WHERE TASK_EXECUTION_ID = '0000' ORDER BY "
								+ "START_TIME DESC, TASK_EXECUTION_ID DESC" },
				// PostgreSQL: WHERE clause before ORDER BY and the trailing LIMIT/OFFSET.
				{ "PostgreSQL",
						"SELECT TASK_EXECUTION_ID, START_TIME, END_TIME, "
								+ "TASK_NAME, EXIT_CODE, EXIT_MESSAGE, ERROR_MESSAGE, LAST_UPDATED, EXTERNAL_EXECUTION_ID, PARENT_EXECUTION_ID "
								+ "FROM %PREFIX%EXECUTION WHERE TASK_EXECUTION_ID = '0000' "
								+ "ORDER BY START_TIME DESC, "
								+ "TASK_EXECUTION_ID DESC LIMIT 10 OFFSET 0" },
				// MySQL: WHERE clause before ORDER BY and the trailing LIMIT <offset>, <count>.
				{ "MySQL",
						"SELECT TASK_EXECUTION_ID, START_TIME, END_TIME, TASK_NAME, "
								+ "EXIT_CODE, EXIT_MESSAGE, ERROR_MESSAGE, LAST_UPDATED, EXTERNAL_EXECUTION_ID, PARENT_EXECUTION_ID FROM "
								+ "%PREFIX%EXECUTION WHERE TASK_EXECUTION_ID = '0000' "
								+ "ORDER BY START_TIME DESC, "
								+ "TASK_EXECUTION_ID DESC LIMIT 0, 10" },
				// SQL Server: WHERE clause inside the ROW_NUMBER() subquery.
				{ "Microsoft SQL Server",
						"SELECT TASK_EXECUTION_ID, START_TIME, END_TIME, "
								+ "TASK_NAME, EXIT_CODE, EXIT_MESSAGE, ERROR_MESSAGE, LAST_UPDATED, EXTERNAL_EXECUTION_ID, PARENT_EXECUTION_ID FROM "
								+ "(SELECT TASK_EXECUTION_ID, START_TIME, END_TIME, TASK_NAME, "
								+ "EXIT_CODE, EXIT_MESSAGE, ERROR_MESSAGE, LAST_UPDATED, EXTERNAL_EXECUTION_ID, PARENT_EXECUTION_ID, ROW_NUMBER() "
								+ "OVER (ORDER BY START_TIME DESC, TASK_EXECUTION_ID DESC) AS "
								+ "TMP_ROW_NUM FROM %PREFIX%EXECUTION WHERE TASK_EXECUTION_ID = "
								+ "'0000') TASK_EXECUTION_PAGE WHERE TMP_ROW_NUM >= 1 "
								+ "AND TMP_ROW_NUM < 11 ORDER BY START_TIME DESC, TASK_EXECUTION_ID DESC" } });
	}

	/**
	 * Builds the paging query with the extra WHERE clause for the given database
	 * product and asserts it matches the expected SQL exactly.
	 * @param databaseProductName JDBC metadata product name used to pick the provider
	 * @param expectedQuery the SQL the provider is expected to generate
	 * @throws Exception if the mock data source or provider cannot be created
	 */
	@ParameterizedTest
	@MethodSource("data")
	public void testGeneratedQuery(String databaseProductName, String expectedQuery) throws Exception {
		PagingQueryProvider pagingQueryProvider = TestDBUtils.getPagingQueryProvider(databaseProductName,
				"TASK_EXECUTION_ID = '0000'");
		String actualQuery = pagingQueryProvider.getPageQuery(this.pageable);
		assertThat(actualQuery)
			.as(String.format("the generated query for %s, was not the expected query", databaseProductName))
			.isEqualTo(expectedQuery);
	}

}

================================================ FILE:
spring-cloud-task-core/src/test/java/org/springframework/cloud/task/repository/support/DatabaseTypeTests.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.repository.support; import javax.sql.DataSource; import org.junit.jupiter.api.Test; import org.springframework.cloud.task.util.TestDBUtils; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatExceptionOfType; import static org.springframework.cloud.task.repository.support.DatabaseType.HSQL; import static org.springframework.cloud.task.repository.support.DatabaseType.MARIADB; import static org.springframework.cloud.task.repository.support.DatabaseType.MYSQL; import static org.springframework.cloud.task.repository.support.DatabaseType.ORACLE; import static org.springframework.cloud.task.repository.support.DatabaseType.POSTGRES; import static org.springframework.cloud.task.repository.support.DatabaseType.fromProductName; /** * Tests that the correct database names are selected from datasource metadata. 
* * @author Lucas Ward * @author Will Schipp * @author Glenn Renfro * */ public class DatabaseTypeTests { @Test public void testFromProductName() { assertThat(fromProductName("HSQL Database Engine")).isEqualTo(HSQL); assertThat(fromProductName("Oracle")).isEqualTo(ORACLE); assertThat(fromProductName("PostgreSQL")).isEqualTo(POSTGRES); assertThat(fromProductName("MySQL")).isEqualTo(MYSQL); assertThat(fromProductName("MariaDB")).isEqualTo(MARIADB); } @Test public void testInvalidProductName() { assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> fromProductName("bad product name")); } @Test public void testFromMetaDataForHsql() throws Exception { DataSource ds = TestDBUtils.getMockDataSource("HSQL Database Engine"); assertThat(DatabaseType.fromMetaData(ds)).isEqualTo(HSQL); } @Test public void testFromMetaDataForOracle() throws Exception { DataSource ds = TestDBUtils.getMockDataSource("Oracle"); assertThat(DatabaseType.fromMetaData(ds)).isEqualTo(ORACLE); } @Test public void testFromMetaDataForPostgres() throws Exception { DataSource ds = TestDBUtils.getMockDataSource("PostgreSQL"); assertThat(DatabaseType.fromMetaData(ds)).isEqualTo(POSTGRES); } @Test public void testFromMetaDataForMySQL() throws Exception { DataSource ds = TestDBUtils.getMockDataSource("MySQL"); assertThat(DatabaseType.fromMetaData(ds)).isEqualTo(MYSQL); } @Test public void testFromMetaDataForMariaDB() throws Exception { DataSource ds = TestDBUtils.getMockDataSource("MariaDB"); assertThat(DatabaseType.fromMetaData(ds)).isEqualTo(MARIADB); } } ================================================ FILE: spring-cloud-task-core/src/test/java/org/springframework/cloud/task/repository/support/SimpleTaskExplorerTests.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.repository.support; import java.time.LocalDateTime; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeSet; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.MethodSource; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.config.AutowireCapableBeanFactory; import org.springframework.boot.autoconfigure.context.PropertyPlaceholderAutoConfiguration; import org.springframework.boot.jdbc.autoconfigure.EmbeddedDataSourceConfiguration; import org.springframework.cloud.task.configuration.TestConfiguration; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.cloud.task.repository.TaskExplorer; import org.springframework.cloud.task.repository.TaskRepository; import org.springframework.cloud.task.util.TestVerifierUtils; import org.springframework.context.annotation.AnnotationConfigApplicationContext; import org.springframework.context.annotation.Configuration; import org.springframework.data.domain.Page; import org.springframework.data.domain.PageRequest; import org.springframework.data.domain.Pageable; import static org.assertj.core.api.Assertions.assertThat; /** * @author Glenn Renfro * @author Gunnar Hillert */ public class 
SimpleTaskExplorerTests { private final static String TASK_NAME = "FOOBAR"; private final static String EXTERNAL_EXECUTION_ID = "123ABC"; private AnnotationConfigApplicationContext context; @Autowired private TaskExplorer taskExplorer; @Autowired private TaskRepository taskRepository; public static Collection data() { return Arrays.asList(new Object[] { DaoType.jdbc, DaoType.map }); } public void testDefaultContext(DaoType testType) { if (testType == DaoType.jdbc) { initializeJdbcExplorerTest(); } else { initializeMapExplorerTest(); } } @AfterEach public void close() { if (this.context != null) { this.context.close(); } } @ParameterizedTest @MethodSource("data") public void getTaskExecution(DaoType testType) { testDefaultContext(testType); Map expectedResults = createSampleDataSet(5); for (Long taskExecutionId : expectedResults.keySet()) { TaskExecution actualTaskExecution = this.taskExplorer.getTaskExecution(taskExecutionId); assertThat(actualTaskExecution) .as(String.format("expected a taskExecution but got null for test type %s", testType)) .isNotNull(); TestVerifierUtils.verifyTaskExecution(expectedResults.get(taskExecutionId), actualTaskExecution); } } @ParameterizedTest @MethodSource("data") public void taskExecutionNotFound(DaoType testType) { testDefaultContext(testType); createSampleDataSet(5); TaskExecution actualTaskExecution = this.taskExplorer.getTaskExecution(-5); assertThat(actualTaskExecution).as(String.format("expected null for actualTaskExecution %s", testType)) .isNull(); } @ParameterizedTest @MethodSource("data") public void getTaskCountByTaskName(DaoType testType) { testDefaultContext(testType); Map expectedResults = createSampleDataSet(5); for (Map.Entry entry : expectedResults.entrySet()) { String taskName = entry.getValue().getTaskName(); assertThat(this.taskExplorer.getTaskExecutionCountByTaskName(taskName)) .as(String.format("task count for task name did not match expected result for testType %s", testType)) .isEqualTo(1); } } 
@ParameterizedTest @MethodSource("data") public void getTaskCount(DaoType testType) { testDefaultContext(testType); createSampleDataSet(33); assertThat(this.taskExplorer.getTaskExecutionCount()) .as(String.format("task count did not match expected result for test Type %s", testType)) .isEqualTo(33); } @ParameterizedTest @MethodSource("data") public void getRunningTaskCount(DaoType testType) { testDefaultContext(testType); createSampleDataSet(33); assertThat(this.taskExplorer.getRunningTaskExecutionCount()) .as(String.format("task count did not match expected result for test Type %s", testType)) .isEqualTo(33); } @ParameterizedTest @MethodSource("data") public void findRunningTasks(DaoType testType) { testDefaultContext(testType); final int TEST_COUNT = 2; final int COMPLETE_COUNT = 5; Map expectedResults = new HashMap<>(); // Store completed task executions int i = 0; for (; i < COMPLETE_COUNT; i++) { createAndSaveTaskExecution(i); } for (; i < (COMPLETE_COUNT + TEST_COUNT); i++) { TaskExecution expectedTaskExecution = this.taskRepository.createTaskExecution(getSimpleTaskExecution()); expectedResults.put(expectedTaskExecution.getExecutionId(), expectedTaskExecution); } Pageable pageable = PageRequest.of(0, 10); Page actualResults = this.taskExplorer.findRunningTaskExecutions(TASK_NAME, pageable); assertThat(actualResults.getNumberOfElements()) .as(String.format("Running task count for task name did not match expected result for testType %s", testType)) .isEqualTo(TEST_COUNT); for (TaskExecution result : actualResults) { assertThat(expectedResults.containsKey(result.getExecutionId())) .as(String.format("result returned from repo %s not expected for testType %s", result.getExecutionId(), testType)) .isTrue(); assertThat(result.getEndTime()) .as(String.format("result had non null for endTime for the testType %s", testType)) .isNull(); } } @ParameterizedTest @MethodSource("data") public void findTasksByExternalExecutionId(DaoType testType) { 
testDefaultContext(testType); Map sampleDataSet = createSampleDataSet(33); sampleDataSet.values().forEach(taskExecution -> { Page taskExecutionsByExecutionId = this.taskExplorer .findTaskExecutionsByExecutionId(taskExecution.getExternalExecutionId(), PageRequest.of(0, 5)); assertThat(taskExecutionsByExecutionId.getTotalElements()).isEqualTo(1); assertThat(this.taskExplorer .getTaskExecutionCountByExternalExecutionId(taskExecution.getExternalExecutionId())).isEqualTo(1); TaskExecution resultTaskExecution = taskExecutionsByExecutionId.getContent().get(0); assertThat(resultTaskExecution.getExecutionId()).isEqualTo(taskExecution.getExecutionId()); }); } @ParameterizedTest @MethodSource("data") public void findTasksByExternalExecutionIdMultipleEntry(DaoType testType) { testDefaultContext(testType); testDefaultContext(testType); final int SAME_EXTERNAL_ID_COUNT = 2; final int UNIQUE_COUNT = 3; Map expectedResults = new HashMap<>(); // Store task executions each with a unique external execution id int i = 0; for (; i < UNIQUE_COUNT; i++) { createAndSaveTaskExecution(i); } // Create task execution with same external execution id for (; i < (UNIQUE_COUNT + SAME_EXTERNAL_ID_COUNT); i++) { TaskExecution expectedTaskExecution = this.taskRepository.createTaskExecution(getSimpleTaskExecution()); expectedResults.put(expectedTaskExecution.getExecutionId(), expectedTaskExecution); } Pageable pageable = PageRequest.of(0, 10); Page resultSet = this.taskExplorer.findTaskExecutionsByExecutionId(EXTERNAL_EXECUTION_ID, pageable); assertThat(resultSet.getTotalElements()).isEqualTo(SAME_EXTERNAL_ID_COUNT); List taskExecutions = resultSet.getContent(); taskExecutions.forEach(taskExecution -> { assertThat(expectedResults.keySet()).contains(taskExecution.getExecutionId()); }); assertThat(this.taskExplorer.getTaskExecutionCountByExternalExecutionId(EXTERNAL_EXECUTION_ID)) .isEqualTo(SAME_EXTERNAL_ID_COUNT); } @ParameterizedTest @MethodSource("data") public void findTasksByName(DaoType 
testType) { testDefaultContext(testType); final int TEST_COUNT = 5; final int COMPLETE_COUNT = 7; Map expectedResults = new HashMap<>(); // Store completed task executions for (int i = 0; i < COMPLETE_COUNT; i++) { createAndSaveTaskExecution(i); } for (int i = 0; i < TEST_COUNT; i++) { TaskExecution expectedTaskExecution = this.taskRepository.createTaskExecution(getSimpleTaskExecution()); expectedResults.put(expectedTaskExecution.getExecutionId(), expectedTaskExecution); } Pageable pageable = PageRequest.of(0, 10); Page resultSet = this.taskExplorer.findTaskExecutionsByName(TASK_NAME, pageable); assertThat(resultSet.getNumberOfElements()) .as(String.format("Running task count for task name did not match expected result for testType %s", testType)) .isEqualTo(TEST_COUNT); for (TaskExecution result : resultSet) { assertThat(expectedResults.containsKey(result.getExecutionId())) .as(String.format("result returned from %s repo %s not expected", testType, result.getExecutionId())) .isTrue(); assertThat(result.getTaskName()) .as(String.format("taskName for taskExecution is incorrect for testType %s", testType)) .isEqualTo(TASK_NAME); } } @ParameterizedTest @MethodSource("data") public void getTaskNames(DaoType testType) { testDefaultContext(testType); final int TEST_COUNT = 5; Set expectedResults = new HashSet<>(); for (int i = 0; i < TEST_COUNT; i++) { TaskExecution expectedTaskExecution = createAndSaveTaskExecution(i); expectedResults.add(expectedTaskExecution.getTaskName()); } List actualTaskNames = this.taskExplorer.getTaskNames(); for (String taskName : actualTaskNames) { assertThat(expectedResults.contains(taskName)) .as(String.format("taskName was not in expected results for testType %s", testType)) .isTrue(); } } @ParameterizedTest @MethodSource("data") public void findAllExecutionsOffBoundry(DaoType testType) { testDefaultContext(testType); Pageable pageable = PageRequest.of(0, 10); verifyPageResults(pageable, 103); } @ParameterizedTest @MethodSource("data") 
public void findAllExecutionsOffBoundryByOne(DaoType testType) { testDefaultContext(testType); Pageable pageable = PageRequest.of(0, 10); verifyPageResults(pageable, 101); } @ParameterizedTest @MethodSource("data") public void findAllExecutionsOnBoundry(DaoType testType) { testDefaultContext(testType); Pageable pageable = PageRequest.of(0, 10); verifyPageResults(pageable, 100); } @ParameterizedTest @MethodSource("data") public void findAllExecutionsNoResult(DaoType testType) { testDefaultContext(testType); Pageable pageable = PageRequest.of(0, 10); verifyPageResults(pageable, 0); } @ParameterizedTest @MethodSource("data") public void findTasksForInvalidJob(DaoType testType) { testDefaultContext(testType); assertThat(this.taskExplorer.getTaskExecutionIdByJobExecutionId(55555L)).isNull(); } @ParameterizedTest @MethodSource("data") public void findJobsExecutionIdsForInvalidTask(DaoType testType) { testDefaultContext(testType); assertThat(this.taskExplorer.getJobExecutionIdsByTaskExecutionId(555555L).size()).isEqualTo(0); } @ParameterizedTest @MethodSource("data") public void getLatestTaskExecutionForTaskName(DaoType testType) { testDefaultContext(testType); Map expectedResults = createSampleDataSet(5); for (Map.Entry taskExecutionMapEntry : expectedResults.entrySet()) { TaskExecution latestTaskExecution = this.taskExplorer .getLatestTaskExecutionForTaskName(taskExecutionMapEntry.getValue().getTaskName()); assertThat(latestTaskExecution) .as(String.format("expected a taskExecution but got null for test type %s", testType)) .isNotNull(); TestVerifierUtils.verifyTaskExecution(expectedResults.get(latestTaskExecution.getExecutionId()), latestTaskExecution); } } @ParameterizedTest @MethodSource("data") public void getLatestTaskExecutionsByTaskNames(DaoType testType) { testDefaultContext(testType); Map expectedResults = createSampleDataSet(5); final List taskNamesAsList = new ArrayList<>(); for (TaskExecution taskExecution : expectedResults.values()) { 
taskNamesAsList.add(taskExecution.getTaskName()); } final List latestTaskExecutions = this.taskExplorer .getLatestTaskExecutionsByTaskNames(taskNamesAsList.toArray(new String[taskNamesAsList.size()])); for (TaskExecution latestTaskExecution : latestTaskExecutions) { assertThat(latestTaskExecution) .as(String.format("expected a taskExecution but got null for test type %s", testType)) .isNotNull(); TestVerifierUtils.verifyTaskExecution(expectedResults.get(latestTaskExecution.getExecutionId()), latestTaskExecution); } } private void verifyPageResults(Pageable pageable, int totalNumberOfExecs) { Map expectedResults = createSampleDataSet(totalNumberOfExecs); List sortedExecIds = getSortedOfTaskExecIds(expectedResults); Iterator expectedTaskExecutionIter = sortedExecIds.iterator(); // Verify pageable totals Page taskPage = this.taskExplorer.findAll(pageable); int pagesExpected = (int) Math.ceil(totalNumberOfExecs / ((double) pageable.getPageSize())); assertThat(taskPage.getTotalPages()).as("actual page count return was not the expected total") .isEqualTo(pagesExpected); assertThat(taskPage.getTotalElements()).as("actual element count was not the expected count") .isEqualTo(totalNumberOfExecs); // Verify pagination Pageable actualPageable = PageRequest.of(0, pageable.getPageSize()); boolean hasMorePages = taskPage.hasContent(); int pageNumber = 0; int elementCount = 0; while (hasMorePages) { taskPage = this.taskExplorer.findAll(actualPageable); hasMorePages = taskPage.hasNext(); List actualTaskExecutions = taskPage.getContent(); int expectedPageSize = pageable.getPageSize(); if (!hasMorePages && pageable.getPageSize() != actualTaskExecutions.size()) { expectedPageSize = totalNumberOfExecs % pageable.getPageSize(); } assertThat(actualTaskExecutions.size()) .as(String.format("Element count on page did not match on the %n page", pageNumber)) .isEqualTo(expectedPageSize); for (TaskExecution actualExecution : actualTaskExecutions) { assertThat(actualExecution.getExecutionId()) 
.as(String.format("Element on page %n did not match expected", pageNumber)) .isEqualTo((long) expectedTaskExecutionIter.next()); TestVerifierUtils.verifyTaskExecution(expectedResults.get(actualExecution.getExecutionId()), actualExecution); elementCount++; } actualPageable = taskPage.nextPageable(); pageNumber++; } // Verify actual totals assertThat(pageNumber).as("Pages processed did not equal expected").isEqualTo(pagesExpected); assertThat(elementCount).as("Elements processed did not equal expected,").isEqualTo(totalNumberOfExecs); } private TaskExecution createAndSaveTaskExecution(int i) { TaskExecution taskExecution = TestVerifierUtils.createSampleTaskExecution(i); taskExecution = this.taskRepository.createTaskExecution(taskExecution); return taskExecution; } private void initializeJdbcExplorerTest() { this.context = new AnnotationConfigApplicationContext(); this.context.register(TestConfiguration.class, EmbeddedDataSourceConfiguration.class, PropertyPlaceholderAutoConfiguration.class); this.context.refresh(); this.context.getAutowireCapableBeanFactory() .autowireBeanProperties(this, AutowireCapableBeanFactory.AUTOWIRE_BY_TYPE, false); } private void initializeMapExplorerTest() { this.context = new AnnotationConfigApplicationContext(); this.context.register(TestConfiguration.class, PropertyPlaceholderAutoConfiguration.class); this.context.refresh(); this.context.getAutowireCapableBeanFactory() .autowireBeanProperties(this, AutowireCapableBeanFactory.AUTOWIRE_BY_TYPE, false); } private Map createSampleDataSet(int count) { Map expectedResults = new HashMap<>(); for (int i = 0; i < count; i++) { TaskExecution expectedTaskExecution = createAndSaveTaskExecution(i); expectedResults.put(expectedTaskExecution.getExecutionId(), expectedTaskExecution); } return expectedResults; } private List getSortedOfTaskExecIds(Map taskExecutionMap) { List sortedExecIds = new ArrayList<>(taskExecutionMap.size()); TreeSet sortedSet = getTreeSet(); 
sortedSet.addAll(taskExecutionMap.values()); Iterator iterator = sortedSet.descendingIterator(); while (iterator.hasNext()) { sortedExecIds.add(iterator.next().getExecutionId()); } return sortedExecIds; } private TreeSet getTreeSet() { return new TreeSet<>(new Comparator() { @Override public int compare(TaskExecution e1, TaskExecution e2) { int result = e1.getStartTime().compareTo(e2.getStartTime()); if (result == 0) { result = Long.valueOf(e1.getExecutionId()).compareTo(e2.getExecutionId()); } return result; } }); } private TaskExecution getSimpleTaskExecution() { TaskExecution taskExecution = new TaskExecution(); taskExecution.setTaskName(TASK_NAME); taskExecution.setStartTime(LocalDateTime.now()); taskExecution.setExternalExecutionId(EXTERNAL_EXECUTION_ID); return taskExecution; } private enum DaoType { jdbc, map } @Configuration public static class DataSourceConfiguration { } @Configuration public static class EmptyConfiguration { } } ================================================ FILE: spring-cloud-task-core/src/test/java/org/springframework/cloud/task/repository/support/SimpleTaskNameResolverTests.java ================================================ /* * Copyright 2016-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.springframework.cloud.task.repository.support; import org.junit.jupiter.api.Test; import org.springframework.context.support.GenericApplicationContext; import static org.assertj.core.api.Assertions.assertThat; /** * @author Michael Minella */ public class SimpleTaskNameResolverTests { @Test public void testDefault() { GenericApplicationContext context = new GenericApplicationContext(); SimpleTaskNameResolver taskNameResolver = new SimpleTaskNameResolver(); taskNameResolver.setApplicationContext(context); assertThat(taskNameResolver.getTaskName() .startsWith("org.springframework.context.support.GenericApplicationContext")).isTrue(); } @Test public void testWithProfile() { GenericApplicationContext context = new GenericApplicationContext(); context.setId("foo:bar"); SimpleTaskNameResolver taskNameResolver = new SimpleTaskNameResolver(); taskNameResolver.setApplicationContext(context); assertThat(taskNameResolver.getTaskName().startsWith("foo_bar")).isTrue(); } @Test public void testApplicationName() { GenericApplicationContext context = new GenericApplicationContext(); context.setId("foo"); SimpleTaskNameResolver taskNameResolver = new SimpleTaskNameResolver(); taskNameResolver.setApplicationContext(context); assertThat(taskNameResolver.getTaskName()).isEqualTo("foo"); } @Test public void testExternalConfig() { GenericApplicationContext context = new GenericApplicationContext(); context.setId("foo"); SimpleTaskNameResolver taskNameResolver = new SimpleTaskNameResolver(); taskNameResolver.setApplicationContext(context); taskNameResolver.setConfiguredName("bar"); assertThat(taskNameResolver.getTaskName()).isEqualTo("bar"); } } ================================================ FILE: spring-cloud-task-core/src/test/java/org/springframework/cloud/task/repository/support/SimpleTaskRepositoryJdbcTests.java ================================================ /* * Copyright 2015-present the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.repository.support; import java.time.LocalDateTime; import java.util.Collections; import java.util.UUID; import javax.sql.DataSource; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.autoconfigure.context.PropertyPlaceholderAutoConfiguration; import org.springframework.boot.jdbc.autoconfigure.EmbeddedDataSourceConfiguration; import org.springframework.cloud.task.configuration.SimpleTaskAutoConfiguration; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.cloud.task.repository.TaskRepository; import org.springframework.cloud.task.util.TaskExecutionCreator; import org.springframework.cloud.task.util.TestDBUtils; import org.springframework.cloud.task.util.TestVerifierUtils; import org.springframework.test.annotation.DirtiesContext; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit.jupiter.SpringExtension; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatExceptionOfType; /** * Tests for the SimpleTaskRepository that uses JDBC as a datastore. * * @author Glenn Renfro. 
* @author Michael Minella * @author Ilayaperumal Gopinathan */ @ExtendWith(SpringExtension.class) @ContextConfiguration(classes = { EmbeddedDataSourceConfiguration.class, SimpleTaskAutoConfiguration.class, PropertyPlaceholderAutoConfiguration.class }) @DirtiesContext public class SimpleTaskRepositoryJdbcTests { @Autowired private TaskRepository taskRepository; @Autowired private DataSource dataSource; @Test @DirtiesContext public void testCreateEmptyExecution() { TaskExecution expectedTaskExecution = TaskExecutionCreator .createAndStoreEmptyTaskExecution(this.taskRepository); TaskExecution actualTaskExecution = TestDBUtils.getTaskExecutionFromDB(this.dataSource, expectedTaskExecution.getExecutionId()); TestVerifierUtils.verifyTaskExecution(expectedTaskExecution, actualTaskExecution); } @Test @DirtiesContext public void testCreateTaskExecutionNoParam() { TaskExecution expectedTaskExecution = TaskExecutionCreator .createAndStoreTaskExecutionNoParams(this.taskRepository); TaskExecution actualTaskExecution = TestDBUtils.getTaskExecutionFromDB(this.dataSource, expectedTaskExecution.getExecutionId()); TestVerifierUtils.verifyTaskExecution(expectedTaskExecution, actualTaskExecution); } @Test @DirtiesContext public void testCreateTaskExecutionWithParam() { TaskExecution expectedTaskExecution = TaskExecutionCreator .createAndStoreTaskExecutionWithParams(this.taskRepository); TaskExecution actualTaskExecution = TestDBUtils.getTaskExecutionFromDB(this.dataSource, expectedTaskExecution.getExecutionId()); TestVerifierUtils.verifyTaskExecution(expectedTaskExecution, actualTaskExecution); } @Test @DirtiesContext public void startTaskExecutionWithParam() { TaskExecution expectedTaskExecution = TaskExecutionCreator .createAndStoreEmptyTaskExecution(this.taskRepository); expectedTaskExecution.setArguments(Collections.singletonList("foo=" + UUID.randomUUID().toString())); expectedTaskExecution.setStartTime(LocalDateTime.now()); 
expectedTaskExecution.setTaskName(UUID.randomUUID().toString()); TaskExecution actualTaskExecution = this.taskRepository.startTaskExecution( expectedTaskExecution.getExecutionId(), expectedTaskExecution.getTaskName(), expectedTaskExecution.getStartTime(), expectedTaskExecution.getArguments(), expectedTaskExecution.getExternalExecutionId()); TestVerifierUtils.verifyTaskExecution(expectedTaskExecution, actualTaskExecution); } @Test @DirtiesContext public void startTaskExecutionWithNoParam() { TaskExecution expectedTaskExecution = TaskExecutionCreator .createAndStoreEmptyTaskExecution(this.taskRepository); expectedTaskExecution.setStartTime(LocalDateTime.now()); expectedTaskExecution.setTaskName(UUID.randomUUID().toString()); TaskExecution actualTaskExecution = this.taskRepository.startTaskExecution( expectedTaskExecution.getExecutionId(), expectedTaskExecution.getTaskName(), expectedTaskExecution.getStartTime(), expectedTaskExecution.getArguments(), expectedTaskExecution.getExternalExecutionId()); TestVerifierUtils.verifyTaskExecution(expectedTaskExecution, actualTaskExecution); } @Test public void testUpdateExternalExecutionId() { TaskExecution expectedTaskExecution = TaskExecutionCreator .createAndStoreTaskExecutionNoParams(this.taskRepository); expectedTaskExecution.setExternalExecutionId(UUID.randomUUID().toString()); this.taskRepository.updateExternalExecutionId(expectedTaskExecution.getExecutionId(), expectedTaskExecution.getExternalExecutionId()); TestVerifierUtils.verifyTaskExecution(expectedTaskExecution, TestDBUtils.getTaskExecutionFromDB(this.dataSource, expectedTaskExecution.getExecutionId())); } @Test public void testUpdateNullExternalExecutionId() { TaskExecution expectedTaskExecution = TaskExecutionCreator .createAndStoreTaskExecutionNoParams(this.taskRepository); expectedTaskExecution.setExternalExecutionId(null); this.taskRepository.updateExternalExecutionId(expectedTaskExecution.getExecutionId(), expectedTaskExecution.getExternalExecutionId()); 
TestVerifierUtils.verifyTaskExecution(expectedTaskExecution, TestDBUtils.getTaskExecutionFromDB(this.dataSource, expectedTaskExecution.getExecutionId())); } @Test public void testInvalidExecutionIdForExternalExecutionIdUpdate() { TaskExecution expectedTaskExecution = TaskExecutionCreator .createAndStoreTaskExecutionNoParams(this.taskRepository); expectedTaskExecution.setExternalExecutionId(null); assertThatExceptionOfType(IllegalStateException.class).isThrownBy(() -> { this.taskRepository.updateExternalExecutionId(-1, expectedTaskExecution.getExternalExecutionId()); }); } @Test @DirtiesContext public void startTaskExecutionWithParent() { TaskExecution expectedTaskExecution = TaskExecutionCreator .createAndStoreEmptyTaskExecution(this.taskRepository); expectedTaskExecution.setStartTime(LocalDateTime.now()); expectedTaskExecution.setTaskName(UUID.randomUUID().toString()); expectedTaskExecution.setParentExecutionId(12345L); TaskExecution actualTaskExecution = this.taskRepository.startTaskExecution( expectedTaskExecution.getExecutionId(), expectedTaskExecution.getTaskName(), expectedTaskExecution.getStartTime(), expectedTaskExecution.getArguments(), expectedTaskExecution.getExternalExecutionId(), expectedTaskExecution.getParentExecutionId()); TestVerifierUtils.verifyTaskExecution(expectedTaskExecution, actualTaskExecution); } @Test @DirtiesContext public void testCompleteTaskExecution() { TaskExecution expectedTaskExecution = TaskExecutionCreator .createAndStoreTaskExecutionNoParams(this.taskRepository); expectedTaskExecution.setEndTime(LocalDateTime.now()); expectedTaskExecution.setExitCode(77); expectedTaskExecution.setExitMessage(UUID.randomUUID().toString()); TaskExecution actualTaskExecution = TaskExecutionCreator.completeExecution(this.taskRepository, expectedTaskExecution); TestVerifierUtils.verifyTaskExecution(expectedTaskExecution, actualTaskExecution); } @Test @DirtiesContext public void testCreateTaskExecutionNoParamMaxExitDefaultMessageSize() { 
TaskExecution expectedTaskExecution = TaskExecutionCreator .createAndStoreTaskExecutionNoParams(this.taskRepository); expectedTaskExecution.setExitMessage(new String(new char[SimpleTaskRepository.MAX_EXIT_MESSAGE_SIZE + 1])); expectedTaskExecution.setEndTime(LocalDateTime.now()); expectedTaskExecution.setExitCode(0); TaskExecution actualTaskExecution = completeTaskExecution(expectedTaskExecution, this.taskRepository); assertThat(actualTaskExecution.getExitMessage().length()).isEqualTo(SimpleTaskRepository.MAX_EXIT_MESSAGE_SIZE); } @Test public void testCreateTaskExecutionNoParamMaxExitMessageSize() { SimpleTaskRepository simpleTaskRepository = new SimpleTaskRepository( new TaskExecutionDaoFactoryBean(this.dataSource)); simpleTaskRepository.setMaxExitMessageSize(5); TaskExecution expectedTaskExecution = TaskExecutionCreator .createAndStoreTaskExecutionNoParams(simpleTaskRepository); expectedTaskExecution.setExitMessage(new String(new char[SimpleTaskRepository.MAX_EXIT_MESSAGE_SIZE + 1])); expectedTaskExecution.setEndTime(LocalDateTime.now()); expectedTaskExecution.setExitCode(0); TaskExecution actualTaskExecution = completeTaskExecution(expectedTaskExecution, simpleTaskRepository); assertThat(actualTaskExecution.getExitMessage().length()).isEqualTo(5); } @Test @DirtiesContext public void testCreateTaskExecutionNoParamMaxErrorDefaultMessageSize() { TaskExecution expectedTaskExecution = TaskExecutionCreator .createAndStoreTaskExecutionNoParams(this.taskRepository); expectedTaskExecution.setErrorMessage(new String(new char[SimpleTaskRepository.MAX_ERROR_MESSAGE_SIZE + 1])); expectedTaskExecution.setEndTime(LocalDateTime.now()); expectedTaskExecution.setExitCode(0); TaskExecution actualTaskExecution = completeTaskExecution(expectedTaskExecution, this.taskRepository); assertThat(actualTaskExecution.getErrorMessage().length()) .isEqualTo(SimpleTaskRepository.MAX_ERROR_MESSAGE_SIZE); } @Test public void testCreateTaskExecutionNoParamMaxErrorMessageSize() { 
SimpleTaskRepository simpleTaskRepository = new SimpleTaskRepository( new TaskExecutionDaoFactoryBean(this.dataSource)); simpleTaskRepository.setMaxErrorMessageSize(5); TaskExecution expectedTaskExecution = TaskExecutionCreator .createAndStoreTaskExecutionNoParams(simpleTaskRepository); expectedTaskExecution.setErrorMessage(new String(new char[SimpleTaskRepository.MAX_ERROR_MESSAGE_SIZE + 1])); expectedTaskExecution.setEndTime(LocalDateTime.now()); expectedTaskExecution.setExitCode(0); TaskExecution actualTaskExecution = completeTaskExecution(expectedTaskExecution, simpleTaskRepository); assertThat(actualTaskExecution.getErrorMessage().length()).isEqualTo(5); } @Test public void testMaxTaskNameSizeForConstructor() { final int MAX_EXIT_MESSAGE_SIZE = 10; final int MAX_ERROR_MESSAGE_SIZE = 20; final int MAX_TASK_NAME_SIZE = 30; SimpleTaskRepository simpleTaskRepository = new SimpleTaskRepository( new TaskExecutionDaoFactoryBean(this.dataSource), MAX_EXIT_MESSAGE_SIZE, MAX_TASK_NAME_SIZE, MAX_ERROR_MESSAGE_SIZE); TaskExecution expectedTaskExecution = TestVerifierUtils.createSampleTaskExecutionNoArg(); expectedTaskExecution.setTaskName(new String(new char[MAX_TASK_NAME_SIZE + 1])); assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> { simpleTaskRepository.createTaskExecution(expectedTaskExecution); }); } @Test public void testDefaultMaxTaskNameSizeForConstructor() { SimpleTaskRepository simpleTaskRepository = new SimpleTaskRepository( new TaskExecutionDaoFactoryBean(this.dataSource), null, null, null); TaskExecution expectedTaskExecution = TestVerifierUtils.createSampleTaskExecutionNoArg(); expectedTaskExecution.setTaskName(new String(new char[SimpleTaskRepository.MAX_TASK_NAME_SIZE + 1])); assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> { simpleTaskRepository.createTaskExecution(expectedTaskExecution); }); } @Test public void testMaxSizeConstructor() { final int MAX_EXIT_MESSAGE_SIZE = 10; final int 
MAX_ERROR_MESSAGE_SIZE = 20; SimpleTaskRepository simpleTaskRepository = new SimpleTaskRepository( new TaskExecutionDaoFactoryBean(this.dataSource), MAX_EXIT_MESSAGE_SIZE, null, MAX_ERROR_MESSAGE_SIZE); verifyTaskRepositoryConstructor(MAX_EXIT_MESSAGE_SIZE, MAX_ERROR_MESSAGE_SIZE, simpleTaskRepository); } @Test public void testDefaultConstructor() { SimpleTaskRepository simpleTaskRepository = new SimpleTaskRepository( new TaskExecutionDaoFactoryBean(this.dataSource), null, null, null); verifyTaskRepositoryConstructor(SimpleTaskRepository.MAX_EXIT_MESSAGE_SIZE, SimpleTaskRepository.MAX_ERROR_MESSAGE_SIZE, simpleTaskRepository); } @Test @DirtiesContext public void testCreateTaskExecutionNoParamMaxTaskName() { TaskExecution taskExecution = new TaskExecution(); taskExecution.setTaskName(new String(new char[SimpleTaskRepository.MAX_TASK_NAME_SIZE + 1])); taskExecution.setStartTime(LocalDateTime.now()); assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> { this.taskRepository.createTaskExecution(taskExecution); }); } @Test @DirtiesContext public void testCreateTaskExecutionNegativeException() { TaskExecution expectedTaskExecution = TaskExecutionCreator .createAndStoreTaskExecutionNoParams(this.taskRepository); expectedTaskExecution.setEndTime(LocalDateTime.now()); expectedTaskExecution.setExitCode(-1); assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> { TaskExecution actualTaskExecution = TaskExecutionCreator.completeExecution(this.taskRepository, expectedTaskExecution); TestVerifierUtils.verifyTaskExecution(expectedTaskExecution, actualTaskExecution); }); } @Test @DirtiesContext public void testCreateTaskExecutionNullEndTime() { TaskExecution expectedTaskExecution = TaskExecutionCreator .createAndStoreTaskExecutionNoParams(this.taskRepository); expectedTaskExecution.setExitCode(-1); assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> { TaskExecutionCreator.completeExecution(this.taskRepository, 
expectedTaskExecution); }); } private TaskExecution completeTaskExecution(TaskExecution expectedTaskExecution, TaskRepository taskRepository) { return taskRepository.completeTaskExecution(expectedTaskExecution.getExecutionId(), expectedTaskExecution.getExitCode(), LocalDateTime.now(), expectedTaskExecution.getExitMessage(), expectedTaskExecution.getErrorMessage()); } private void verifyTaskRepositoryConstructor(Integer maxExitMessage, Integer maxErrorMessage, TaskRepository taskRepository) { TaskExecution expectedTaskExecution = TaskExecutionCreator.createAndStoreTaskExecutionNoParams(taskRepository); expectedTaskExecution.setErrorMessage(new String(new char[maxErrorMessage + 1])); expectedTaskExecution.setExitMessage(new String(new char[maxExitMessage + 1])); expectedTaskExecution.setEndTime(LocalDateTime.now()); expectedTaskExecution.setExitCode(0); TaskExecution actualTaskExecution = completeTaskExecution(expectedTaskExecution, taskRepository); assertThat(actualTaskExecution.getErrorMessage().length()).isEqualTo(maxErrorMessage.intValue()); assertThat(actualTaskExecution.getExitMessage().length()).isEqualTo(maxExitMessage.intValue()); } } ================================================ FILE: spring-cloud-task-core/src/test/java/org/springframework/cloud/task/repository/support/SimpleTaskRepositoryMapTests.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.springframework.cloud.task.repository.support; import java.time.LocalDateTime; import java.util.Collections; import java.util.Map; import java.util.UUID; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.cloud.task.repository.TaskRepository; import org.springframework.cloud.task.repository.dao.MapTaskExecutionDao; import org.springframework.cloud.task.util.TaskExecutionCreator; import org.springframework.cloud.task.util.TestVerifierUtils; import static org.assertj.core.api.Assertions.assertThatExceptionOfType; import static org.springframework.test.util.AssertionErrors.assertTrue; /** * Tests for the SimpleTaskRepository that uses Map as a datastore. * * @author Glenn Renfro * @author Ilayaperumal Gopinathan */ public class SimpleTaskRepositoryMapTests { private TaskRepository taskRepository; @BeforeEach public void setUp() { this.taskRepository = new SimpleTaskRepository(new TaskExecutionDaoFactoryBean()); } @Test public void testCreateEmptyExecution() { TaskExecution expectedTaskExecution = TaskExecutionCreator .createAndStoreEmptyTaskExecution(this.taskRepository); TestVerifierUtils.verifyTaskExecution(expectedTaskExecution, getSingleTaskExecutionFromMapRepository(expectedTaskExecution.getExecutionId())); } @Test public void testCreateTaskExecutionNoParam() { TaskExecution expectedTaskExecution = TaskExecutionCreator .createAndStoreTaskExecutionNoParams(this.taskRepository); TestVerifierUtils.verifyTaskExecution(expectedTaskExecution, getSingleTaskExecutionFromMapRepository(expectedTaskExecution.getExecutionId())); } @Test public void testUpdateExternalExecutionId() { TaskExecution expectedTaskExecution = TaskExecutionCreator .createAndStoreTaskExecutionNoParams(this.taskRepository); expectedTaskExecution.setExternalExecutionId(UUID.randomUUID().toString()); 
this.taskRepository.updateExternalExecutionId(expectedTaskExecution.getExecutionId(), expectedTaskExecution.getExternalExecutionId()); TestVerifierUtils.verifyTaskExecution(expectedTaskExecution, getSingleTaskExecutionFromMapRepository(expectedTaskExecution.getExecutionId())); } @Test public void testUpdateNullExternalExecutionId() { TaskExecution expectedTaskExecution = TaskExecutionCreator .createAndStoreTaskExecutionNoParams(this.taskRepository); expectedTaskExecution.setExternalExecutionId(null); this.taskRepository.updateExternalExecutionId(expectedTaskExecution.getExecutionId(), expectedTaskExecution.getExternalExecutionId()); TestVerifierUtils.verifyTaskExecution(expectedTaskExecution, getSingleTaskExecutionFromMapRepository(expectedTaskExecution.getExecutionId())); } @Test public void testInvalidExecutionIdForExternalExecutionIdUpdate() { TaskExecution expectedTaskExecution = TaskExecutionCreator .createAndStoreTaskExecutionNoParams(this.taskRepository); expectedTaskExecution.setExternalExecutionId(null); assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> { this.taskRepository.updateExternalExecutionId(-1, expectedTaskExecution.getExternalExecutionId()); }); } @Test public void testCreateTaskExecutionWithParam() { TaskExecution expectedTaskExecution = TaskExecutionCreator .createAndStoreTaskExecutionWithParams(this.taskRepository); TestVerifierUtils.verifyTaskExecution(expectedTaskExecution, getSingleTaskExecutionFromMapRepository(expectedTaskExecution.getExecutionId())); } @Test public void startTaskExecutionWithParam() { TaskExecution expectedTaskExecution = TaskExecutionCreator .createAndStoreEmptyTaskExecution(this.taskRepository); expectedTaskExecution.setArguments(Collections.singletonList("foo=" + UUID.randomUUID().toString())); expectedTaskExecution.setStartTime(LocalDateTime.now()); expectedTaskExecution.setTaskName(UUID.randomUUID().toString()); TaskExecution actualTaskExecution = this.taskRepository.startTaskExecution( 
expectedTaskExecution.getExecutionId(), expectedTaskExecution.getTaskName(), expectedTaskExecution.getStartTime(), expectedTaskExecution.getArguments(), expectedTaskExecution.getExternalExecutionId(), expectedTaskExecution.getParentExecutionId()); TestVerifierUtils.verifyTaskExecution(expectedTaskExecution, actualTaskExecution); } @Test public void startTaskExecutionWithNoParam() { TaskExecution expectedTaskExecution = TaskExecutionCreator .createAndStoreEmptyTaskExecution(this.taskRepository); expectedTaskExecution.setStartTime(LocalDateTime.now()); expectedTaskExecution.setTaskName(UUID.randomUUID().toString()); TaskExecution actualTaskExecution = this.taskRepository.startTaskExecution( expectedTaskExecution.getExecutionId(), expectedTaskExecution.getTaskName(), expectedTaskExecution.getStartTime(), expectedTaskExecution.getArguments(), expectedTaskExecution.getExternalExecutionId()); TestVerifierUtils.verifyTaskExecution(expectedTaskExecution, actualTaskExecution); } @Test public void startTaskExecutionWithParent() { TaskExecution expectedTaskExecution = TaskExecutionCreator .createAndStoreEmptyTaskExecution(this.taskRepository); expectedTaskExecution.setStartTime(LocalDateTime.now()); expectedTaskExecution.setTaskName(UUID.randomUUID().toString()); expectedTaskExecution.setParentExecutionId(12345L); TaskExecution actualTaskExecution = this.taskRepository.startTaskExecution( expectedTaskExecution.getExecutionId(), expectedTaskExecution.getTaskName(), expectedTaskExecution.getStartTime(), expectedTaskExecution.getArguments(), expectedTaskExecution.getExternalExecutionId()); TestVerifierUtils.verifyTaskExecution(expectedTaskExecution, actualTaskExecution); } @Test public void testCompleteTaskExecution() { TaskExecution expectedTaskExecution = TaskExecutionCreator .createAndStoreTaskExecutionNoParams(this.taskRepository); expectedTaskExecution.setEndTime(LocalDateTime.now()); expectedTaskExecution.setExitCode(0); TaskExecution actualTaskExecution = 
TaskExecutionCreator.completeExecution(this.taskRepository, expectedTaskExecution); TestVerifierUtils.verifyTaskExecution(expectedTaskExecution, actualTaskExecution); } private TaskExecution getSingleTaskExecutionFromMapRepository(long taskExecutionId) { Map taskMap = ((MapTaskExecutionDao) ((SimpleTaskRepository) this.taskRepository) .getTaskExecutionDao()).getTaskExecutions(); assertTrue("taskExecutionId must be in MapTaskExecutionRepository", taskMap.containsKey(taskExecutionId)); return taskMap.get(taskExecutionId); } @Test public void testCreateTaskExecutionNullEndTime() { TaskExecution expectedTaskExecution = TaskExecutionCreator .createAndStoreTaskExecutionNoParams(this.taskRepository); expectedTaskExecution.setExitCode(-1); assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> { TaskExecutionCreator.completeExecution(this.taskRepository, expectedTaskExecution); }); } } ================================================ FILE: spring-cloud-task-core/src/test/java/org/springframework/cloud/task/repository/support/TaskDatabaseInitializerTests.java ================================================ /* * Copyright 2016-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.springframework.cloud.task.repository.support; import javax.sql.DataSource; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Test; import org.springframework.beans.factory.BeanCreationException; import org.springframework.boot.autoconfigure.context.PropertyPlaceholderAutoConfiguration; import org.springframework.boot.jdbc.autoconfigure.EmbeddedDataSourceConfiguration; import org.springframework.cloud.task.configuration.SimpleTaskAutoConfiguration; import org.springframework.cloud.task.configuration.TestConfiguration; import org.springframework.cloud.task.repository.dao.MapTaskExecutionDao; import org.springframework.context.annotation.AnnotationConfigApplicationContext; import org.springframework.context.annotation.Configuration; import org.springframework.jdbc.core.JdbcTemplate; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatExceptionOfType; import static org.mockito.Mockito.mock; /** * Verifies that task initialization occurs properly. 
* * @author Glenn Renfro */ public class TaskDatabaseInitializerTests { private AnnotationConfigApplicationContext context; @AfterEach public void close() { if (this.context != null) { this.context.close(); } } @Test public void testDefaultContext() { this.context = new AnnotationConfigApplicationContext(); this.context.register(TestConfiguration.class, EmbeddedDataSourceConfiguration.class, PropertyPlaceholderAutoConfiguration.class); this.context.refresh(); assertThat(new JdbcTemplate(this.context.getBean(DataSource.class)).queryForList("select * from TASK_EXECUTION") .size()).isEqualTo(0); } @Test public void testNoDatabase() { this.context = new AnnotationConfigApplicationContext(EmptyConfiguration.class); SimpleTaskRepository repository = new SimpleTaskRepository(new TaskExecutionDaoFactoryBean()); assertThat(repository.getTaskExecutionDao()).isInstanceOf(MapTaskExecutionDao.class); MapTaskExecutionDao dao = (MapTaskExecutionDao) repository.getTaskExecutionDao(); assertThat(dao.getTaskExecutions().size()).isEqualTo(0); } @Test public void testNoTaskConfiguration() { this.context = new AnnotationConfigApplicationContext(); this.context.register(EmptyConfiguration.class, EmbeddedDataSourceConfiguration.class, PropertyPlaceholderAutoConfiguration.class); this.context.refresh(); assertThat(this.context.getBeanNamesForType(SimpleTaskRepository.class).length).isEqualTo(0); } @Test public void testMultipleDataSourcesContext() { this.context = new AnnotationConfigApplicationContext(); this.context.register(SimpleTaskAutoConfiguration.class, EmbeddedDataSourceConfiguration.class, PropertyPlaceholderAutoConfiguration.class); DataSource dataSource = mock(DataSource.class); this.context.getBeanFactory().registerSingleton("mockDataSource", dataSource); assertThatExceptionOfType(BeanCreationException.class).isThrownBy(() -> { this.context.refresh(); }); } @Configuration public static class EmptyConfiguration { } } ================================================ FILE: 
spring-cloud-task-core/src/test/java/org/springframework/cloud/task/repository/support/TaskExecutionDaoFactoryBeanTests.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.repository.support; import javax.sql.DataSource; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Test; import org.springframework.cloud.task.repository.dao.JdbcTaskExecutionDao; import org.springframework.cloud.task.repository.dao.MapTaskExecutionDao; import org.springframework.cloud.task.repository.dao.TaskExecutionDao; import org.springframework.context.ConfigurableApplicationContext; import org.springframework.context.annotation.AnnotationConfigApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType; import org.springframework.test.util.ReflectionTestUtils; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatExceptionOfType; /** * @author Michael Minella */ public class TaskExecutionDaoFactoryBeanTests { private ConfigurableApplicationContext context; @AfterEach public void tearDown() { if (this.context != null) { this.context.close(); } } @Test public 
void testGetObjectType() { assertThat(TaskExecutionDao.class).isEqualTo(new TaskExecutionDaoFactoryBean().getObjectType()); } @Test public void testIsSingleton() { assertThat(new TaskExecutionDaoFactoryBean().isSingleton()).isTrue(); } @Test public void testConstructorValidation() { assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> { new TaskExecutionDaoFactoryBean(null); }); } @Test public void testMapTaskExecutionDaoWithoutAppContext() throws Exception { TaskExecutionDaoFactoryBean factoryBean = new TaskExecutionDaoFactoryBean(); TaskExecutionDao taskExecutionDao = factoryBean.getObject(); assertThat(taskExecutionDao instanceof MapTaskExecutionDao).isTrue(); TaskExecutionDao taskExecutionDao2 = factoryBean.getObject(); assertThat(taskExecutionDao == taskExecutionDao2).isTrue(); } @Test public void testDefaultDataSourceConfiguration() throws Exception { this.context = new AnnotationConfigApplicationContext(DefaultDataSourceConfiguration.class); DataSource dataSource = this.context.getBean(DataSource.class); TaskExecutionDaoFactoryBean factoryBean = new TaskExecutionDaoFactoryBean(dataSource); TaskExecutionDao taskExecutionDao = factoryBean.getObject(); assertThat(taskExecutionDao instanceof JdbcTaskExecutionDao).isTrue(); TaskExecutionDao taskExecutionDao2 = factoryBean.getObject(); assertThat(taskExecutionDao == taskExecutionDao2).isTrue(); } @Test public void testSettingTablePrefix() throws Exception { this.context = new AnnotationConfigApplicationContext(DefaultDataSourceConfiguration.class); DataSource dataSource = this.context.getBean(DataSource.class); TaskExecutionDaoFactoryBean factoryBean = new TaskExecutionDaoFactoryBean(dataSource, "foo_"); TaskExecutionDao taskExecutionDao = factoryBean.getObject(); assertThat(ReflectionTestUtils.getField(taskExecutionDao, "tablePrefix")).isEqualTo("foo_"); } @Configuration public static class DefaultDataSourceConfiguration { @Bean public DataSource dataSource() { EmbeddedDatabaseBuilder 
builder = new EmbeddedDatabaseBuilder().setType(EmbeddedDatabaseType.H2); return builder.build(); } } } ================================================ FILE: spring-cloud-task-core/src/test/java/org/springframework/cloud/task/util/TaskExecutionCreator.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.util; import java.util.ArrayList; import java.util.List; import java.util.UUID; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.cloud.task.repository.TaskRepository; /** * Offers ability to create TaskExecutions for the test suite. * * @author Glenn Renfro */ public final class TaskExecutionCreator { private TaskExecutionCreator() { } /** * Creates a sample TaskExecution and stores it in the taskRepository. * @param taskRepository the taskRepository where the taskExecution should be stored. * @return the taskExecution created. */ public static TaskExecution createAndStoreEmptyTaskExecution(TaskRepository taskRepository) { return taskRepository.createTaskExecution(); } /** * Creates a sample TaskExecution and stores it in the taskRepository. * @param taskRepository the taskRepository where the taskExecution should be stored. * @return the taskExecution created. 
*/ public static TaskExecution createAndStoreTaskExecutionNoParams(TaskRepository taskRepository) { TaskExecution expectedTaskExecution = taskRepository.createTaskExecution(); return expectedTaskExecution; } /** * Creates a sample TaskExecution and stores it in the taskRepository with params. * @param taskRepository the taskRepository where the taskExecution should be stored. * @return the taskExecution created. */ public static TaskExecution createAndStoreTaskExecutionWithParams(TaskRepository taskRepository) { TaskExecution expectedTaskExecution = TestVerifierUtils.createSampleTaskExecutionNoArg(); List params = new ArrayList<>(); params.add(UUID.randomUUID().toString()); params.add(UUID.randomUUID().toString()); expectedTaskExecution.setArguments(params); expectedTaskExecution = taskRepository.createTaskExecution(expectedTaskExecution); return expectedTaskExecution; } /** * Updates a sample TaskExecution in the taskRepository. * @param taskRepository the taskRepository where the taskExecution should be updated. * @param expectedTaskExecution the expected task execution. * @return the taskExecution created. */ public static TaskExecution completeExecution(TaskRepository taskRepository, TaskExecution expectedTaskExecution) { return taskRepository.completeTaskExecution(expectedTaskExecution.getExecutionId(), expectedTaskExecution.getExitCode(), expectedTaskExecution.getEndTime(), expectedTaskExecution.getExitMessage(), expectedTaskExecution.getErrorMessage()); } } ================================================ FILE: spring-cloud-task-core/src/test/java/org/springframework/cloud/task/util/TestDBUtils.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.util; import java.sql.Connection; import java.sql.DatabaseMetaData; import java.sql.ResultSet; import java.sql.SQLException; import java.time.LocalDateTime; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.TreeMap; import javax.sql.DataSource; import org.springframework.batch.infrastructure.item.database.Order; import org.springframework.batch.infrastructure.item.database.support.DataFieldMaxValueIncrementerFactory; import org.springframework.batch.infrastructure.item.database.support.DefaultDataFieldMaxValueIncrementerFactory; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.cloud.task.repository.dao.JdbcTaskExecutionDao; import org.springframework.cloud.task.repository.database.PagingQueryProvider; import org.springframework.cloud.task.repository.database.support.SqlPagingQueryProviderFactoryBean; import org.springframework.cloud.task.repository.support.DatabaseType; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.jdbc.core.RowMapper; import org.springframework.jdbc.support.MetaDataAccessException; import org.springframework.jdbc.support.incrementer.DataFieldMaxValueIncrementer; import org.springframework.util.StringUtils; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; /** * Provides a suite of tools that allow tests the ability to retrieve results from a * relational database. 
* * @author Glenn Renfro * @author Ilayaperumal Gopinathan */ public final class TestDBUtils { private TestDBUtils() { } /** * Retrieves the TaskExecution from the datasource. * @param dataSource The datasource from which to retrieve the taskExecution. * @param taskExecutionId The id of the task to search. * @return taskExecution retrieved from the database. */ public static TaskExecution getTaskExecutionFromDB(DataSource dataSource, long taskExecutionId) { String sql = "SELECT * FROM TASK_EXECUTION WHERE " + "TASK_EXECUTION_ID = '" + taskExecutionId + "'"; JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource); List rows = jdbcTemplate.query(sql, new RowMapper() { @Override public TaskExecution mapRow(ResultSet rs, int rownumber) throws SQLException { TaskExecution taskExecution = new TaskExecution(rs.getLong("TASK_EXECUTION_ID"), StringUtils.hasText(rs.getString("EXIT_CODE")) ? Integer.valueOf(rs.getString("EXIT_CODE")) : null, rs.getString("TASK_NAME"), rs.getObject("START_TIME", LocalDateTime.class), rs.getObject("END_TIME", LocalDateTime.class), rs.getString("EXIT_MESSAGE"), new ArrayList<>(0), rs.getString("ERROR_MESSAGE"), rs.getString("EXTERNAL_EXECUTION_ID")); return taskExecution; } }); assertThat(rows.size()).as("only one row should be returned").isEqualTo(1); TaskExecution taskExecution = rows.get(0); populateParamsToDB(dataSource, taskExecution); return taskExecution; } /** * Create a pagingQueryProvider specific database type with a findAll. * @param databaseProductName of the database. * @return a {@link PagingQueryProvider} that will return all the requested * information. * @throws Exception exception thrown if error occurs creating * {@link PagingQueryProvider}. */ public static PagingQueryProvider getPagingQueryProvider(String databaseProductName) throws Exception { return getPagingQueryProvider(databaseProductName, null); } /** * Create a pagingQueryProvider specific database type with a query containing a where * clause. 
* @param databaseProductName of the database. * @param whereClause to be applied to the query. * @return a PagingQueryProvider that will return the requested information. * @throws Exception exception thrown if error occurs creating * {@link PagingQueryProvider}. */ public static PagingQueryProvider getPagingQueryProvider(String databaseProductName, String whereClause) throws Exception { DataSource dataSource = getMockDataSource(databaseProductName); Map orderMap = new TreeMap<>(); orderMap.put("START_TIME", Order.DESCENDING); orderMap.put("TASK_EXECUTION_ID", Order.DESCENDING); SqlPagingQueryProviderFactoryBean factoryBean = new SqlPagingQueryProviderFactoryBean(); factoryBean.setSelectClause(JdbcTaskExecutionDao.SELECT_CLAUSE); factoryBean.setFromClause(JdbcTaskExecutionDao.FROM_CLAUSE); if (whereClause != null) { factoryBean.setWhereClause(whereClause); } factoryBean.setSortKeys(orderMap); factoryBean.setDataSource(dataSource); PagingQueryProvider pagingQueryProvider = null; try { pagingQueryProvider = factoryBean.getObject(); pagingQueryProvider.init(dataSource); } catch (Exception e) { throw new IllegalStateException(e); } return pagingQueryProvider; } /** * Creates a mock DataSource for use in testing. * @param databaseProductName the name of the database type to mock. * @return a mock DataSource. * @throws Exception exception thrown if error occurs creating mock * {@link DataSource}. */ public static DataSource getMockDataSource(String databaseProductName) throws Exception { DatabaseMetaData dmd = mock(DatabaseMetaData.class); DataSource ds = mock(DataSource.class); Connection con = mock(Connection.class); when(ds.getConnection()).thenReturn(con); when(con.getMetaData()).thenReturn(dmd); when(dmd.getDatabaseProductName()).thenReturn(databaseProductName); return ds; } /** * Creates a incrementer for the DataSource. * @param dataSource the datasource that the incrementer will use to record current * id. * @return a DataFieldMaxValueIncrementer object. 
*/ public static DataFieldMaxValueIncrementer getIncrementer(DataSource dataSource) { DataFieldMaxValueIncrementerFactory incrementerFactory = new DefaultDataFieldMaxValueIncrementerFactory( dataSource); String databaseType = null; try { databaseType = DatabaseType.fromMetaData(dataSource).name(); } catch (MetaDataAccessException e) { throw new IllegalStateException(e); } catch (SQLException ex) { throw new IllegalStateException("Unable to detect database type", ex); } return incrementerFactory.getIncrementer(databaseType, "TASK_SEQ"); } private static void populateParamsToDB(DataSource dataSource, TaskExecution taskExecution) { String sql = "SELECT * FROM TASK_EXECUTION_PARAMS WHERE TASK_EXECUTION_ID = '" + taskExecution.getExecutionId() + "'"; JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource); List> rows = jdbcTemplate.queryForList(sql); List arguments = new ArrayList<>(); for (Map row : rows) { arguments.add((String) row.get("TASK_PARAM")); } taskExecution.setArguments(arguments); } } ================================================ FILE: spring-cloud-task-core/src/test/java/org/springframework/cloud/task/util/TestDefaultConfiguration.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.springframework.cloud.task.util; import javax.sql.DataSource; import io.micrometer.observation.ObservationRegistry; import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.ApplicationArguments; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.cloud.task.configuration.TaskObservationCloudKeyValues; import org.springframework.cloud.task.configuration.TaskProperties; import org.springframework.cloud.task.listener.TaskLifecycleListener; import org.springframework.cloud.task.listener.TaskListenerExecutorObjectFactory; import org.springframework.cloud.task.repository.TaskExplorer; import org.springframework.cloud.task.repository.TaskNameResolver; import org.springframework.cloud.task.repository.TaskRepository; import org.springframework.cloud.task.repository.support.SimpleTaskExplorer; import org.springframework.cloud.task.repository.support.SimpleTaskNameResolver; import org.springframework.cloud.task.repository.support.SimpleTaskRepository; import org.springframework.cloud.task.repository.support.TaskExecutionDaoFactoryBean; import org.springframework.context.ConfigurableApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; /** * Initializes the beans needed to test default task behavior. 
 * @author Glenn Renfro
 * @author Michael Minella
 */
@Configuration
@EnableConfigurationProperties(TaskProperties.class)
public class TestDefaultConfiguration implements InitializingBean {

	@Autowired
	TaskProperties taskProperties;

	// Populated in afterPropertiesSet() so the DataSource lookup happens after all
	// autowired dependencies are injected; consumed by the @Bean methods below.
	private TaskExecutionDaoFactoryBean factoryBean;

	// Optional: only present when running inside a real Boot application.
	@Autowired(required = false)
	private ApplicationArguments applicationArguments;

	@Autowired
	private ConfigurableApplicationContext context;

	public TestDefaultConfiguration() {
	}

	@Bean
	public TaskRepository taskRepository() {
		return new SimpleTaskRepository(this.factoryBean);
	}

	@Bean
	public TaskExplorer taskExplorer() {
		return new SimpleTaskExplorer(this.factoryBean);
	}

	@Bean
	public TaskNameResolver taskNameResolver() {
		return new SimpleTaskNameResolver();
	}

	@Bean
	public TaskListenerExecutorObjectFactory taskListenerExecutorObjectProvider(
			ConfigurableApplicationContext context) {
		return new TaskListenerExecutorObjectFactory(context);
	}

	@Bean
	public TaskLifecycleListener taskHandler(TaskExplorer taskExplorer,
			@Autowired(required = false) io.micrometer.core.instrument.MeterRegistry meterRegistry,
			@Autowired(required = false) ObservationRegistry observationRegistry) {
		// Wires the listener with the locally declared repository/resolver beans.
		return new TaskLifecycleListener(taskRepository(), taskNameResolver(), this.applicationArguments,
				taskExplorer, this.taskProperties, taskListenerExecutorObjectProvider(this.context),
				observationRegistry, new TaskObservationCloudKeyValues());
	}

	@Override
	public void afterPropertiesSet() throws Exception {
		// Use a JDBC-backed DAO only when exactly one DataSource bean is present;
		// otherwise (zero or ambiguous) fall back to the in-memory map-based DAO.
		if (this.context.getBeanNamesForType(DataSource.class).length == 1) {
			DataSource dataSource = this.context.getBean(DataSource.class);
			this.factoryBean = new TaskExecutionDaoFactoryBean(dataSource);
		}
		else {
			this.factoryBean = new TaskExecutionDaoFactoryBean();
		}
	}

}
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.util; import org.springframework.cloud.task.repository.TaskExecution; /** * Provides the basic infrastructure for evaluating if task listener performed properly. * * @author Glenn Renfro */ public abstract class TestListener { /** * Default end message to use for the start message. */ public static final String START_MESSAGE = "FOO"; /** * Default end message to use for the error message. */ public static final String ERROR_MESSAGE = "BAR"; /** * Default end message to use for the end message. */ public static final String END_MESSAGE = "BAZ"; protected boolean isTaskStartup; protected boolean isTaskEnd; protected boolean isTaskFailed; protected TaskExecution taskExecution; protected Throwable throwable; /** * Indicates if the task listener was called during task create step. * @return true if task listener was called during task creation, else false. */ public boolean isTaskStartup() { return this.isTaskStartup; } /** * Indicates if the task listener was called during task end. * @return true if the task listener was called during task end, else false. */ public boolean isTaskEnd() { return this.isTaskEnd; } /** * Indicates if the task listener was called during task failed step. * @return true if task listener was called during task failure, else false. */ public boolean isTaskFailed() { return this.isTaskFailed; } /** * Task Execution that was updated during listener call. 
* @return instance of TaskExecution. */ public TaskExecution getTaskExecution() { return this.taskExecution; } /** * The throwable that was sent with the task if task failed. * @return instance of Throwable. */ public Throwable getThrowable() { return this.throwable; } } ================================================ FILE: spring-cloud-task-core/src/test/java/org/springframework/cloud/task/util/TestVerifierUtils.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.util; import java.time.LocalDateTime; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Random; import java.util.Set; import java.util.UUID; import ch.qos.logback.classic.spi.LoggingEvent; import ch.qos.logback.core.Appender; import org.mockito.ArgumentMatcher; import org.slf4j.LoggerFactory; import org.springframework.cloud.task.repository.TaskExecution; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.ArgumentMatchers.argThat; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; /** * Offers utils to test the results produced by the code being tested. * * @author Glenn Renfro */ public final class TestVerifierUtils { /** * The default number of arguments to use for the test TaskExecutions. 
*/ public static final int ARG_SIZE = 5; private TestVerifierUtils() { } /** * Creates a mock {@link Appender} to be added to the root logger. * @return reference to the mock appender. */ public static Appender getMockAppender() { ch.qos.logback.classic.Logger root = (ch.qos.logback.classic.Logger) LoggerFactory .getLogger(ch.qos.logback.classic.Logger.ROOT_LOGGER_NAME); final Appender mockAppender = mock(Appender.class); when(mockAppender.getName()).thenReturn("MOCK"); root.addAppender(mockAppender); return mockAppender; } /** * Verifies that the log sample is contained within the content that was written to * the mock appender. * @param mockAppender The appender that is associated with the test. * @param logSample The string to search for in the log entry. */ public static void verifyLogEntryExists(Appender mockAppender, final String logSample) { verify(mockAppender).doAppend(argThat(new ArgumentMatcher() { @Override public boolean matches(final Object argument) { return ((LoggingEvent) argument).getFormattedMessage().contains(logSample); } })); } /** * Creates a fully populated TaskExecution (except args) for testing. * @return instance of a TaskExecution */ public static TaskExecution createSampleTaskExecutionNoArg() { Random randomGenerator = new Random(); long executionId = randomGenerator.nextLong(); String taskName = UUID.randomUUID().toString(); return new TaskExecution(executionId, null, taskName, LocalDateTime.now(), null, null, new ArrayList<>(), null, null); } /** * Creates a fully populated TaskExecution (except args) for testing. * @return instance of a TaskExecution. 
*/ public static TaskExecution endSampleTaskExecutionNoArg() { Random randomGenerator = new Random(); int exitCode = randomGenerator.nextInt(); long executionId = randomGenerator.nextLong(); String taskName = UUID.randomUUID().toString(); String exitMessage = UUID.randomUUID().toString(); return new TaskExecution(executionId, exitCode, taskName, LocalDateTime.now(), LocalDateTime.now(), exitMessage, new ArrayList<>(), null, null); } /** * Creates a fully populated TaskExecution for testing. * @param executionId the taskExecutionId to be created. * @return instance of the TaskExecution. */ public static TaskExecution createSampleTaskExecution(long executionId) { String taskName = UUID.randomUUID().toString(); String externalExecutionId = UUID.randomUUID().toString(); List args = new ArrayList<>(ARG_SIZE); for (int i = 0; i < ARG_SIZE; i++) { args.add(UUID.randomUUID().toString()); } return new TaskExecution(executionId, null, taskName, LocalDateTime.now(), null, null, args, null, externalExecutionId); } /** * Verifies that all the fields in between the expected and actual are the same. * @param expectedTaskExecution The expected value for the task execution. * @param actualTaskExecution The actual value for the task execution. 
*/ public static void verifyTaskExecution(TaskExecution expectedTaskExecution, TaskExecution actualTaskExecution) { assertThat(actualTaskExecution.getExecutionId()).as("taskExecutionId must be equal") .isEqualTo(expectedTaskExecution.getExecutionId()); if (actualTaskExecution.getStartTime() != null) { assertThat(actualTaskExecution.getStartTime().getHour() == expectedTaskExecution.getStartTime().getHour()) .as("startTime hour must be equal") .isTrue(); assertThat( actualTaskExecution.getStartTime().getMinute() == expectedTaskExecution.getStartTime().getMinute()) .as("startTime minute must be equal") .isTrue(); assertThat( actualTaskExecution.getStartTime().getSecond() == expectedTaskExecution.getStartTime().getSecond()) .as("startTime second must be equal") .isTrue(); assertThat(actualTaskExecution.getStartTime().getDayOfYear() == expectedTaskExecution.getStartTime() .getDayOfYear()).as("startTime day must be equal").isTrue(); assertThat(actualTaskExecution.getStartTime().getYear() == expectedTaskExecution.getStartTime().getYear()) .as("startTime year must be equal") .isTrue(); assertThat(actualTaskExecution.getStartTime().getMonthValue() == expectedTaskExecution.getStartTime() .getMonthValue()).as("startTime month must be equal").isTrue(); } if (actualTaskExecution.getEndTime() != null) { assertThat(actualTaskExecution.getEndTime().getHour() == expectedTaskExecution.getEndTime().getHour()) .as("endTime hour must be equal") .isTrue(); assertThat(actualTaskExecution.getEndTime().getMinute() == expectedTaskExecution.getEndTime().getMinute()) .as("endTime minute must be equal") .isTrue(); assertThat(actualTaskExecution.getEndTime().getSecond() == expectedTaskExecution.getEndTime().getSecond()) .as("endTime second must be equal") .isTrue(); assertThat(actualTaskExecution.getEndTime().getDayOfYear() == expectedTaskExecution.getEndTime() .getDayOfYear()).as("endTime day must be equal").isTrue(); assertThat(actualTaskExecution.getEndTime().getYear() == 
expectedTaskExecution.getEndTime().getYear()) .as("endTime year must be equal") .isTrue(); assertThat(actualTaskExecution.getEndTime().getMonthValue() == expectedTaskExecution.getEndTime() .getMonthValue()).as("endTime month must be equal").isTrue(); } assertThat(actualTaskExecution.getExitCode()).as("exitCode must be equal") .isEqualTo(expectedTaskExecution.getExitCode()); assertThat(actualTaskExecution.getTaskName()).as("taskName must be equal") .isEqualTo(expectedTaskExecution.getTaskName()); assertThat(actualTaskExecution.getExitMessage()).as("exitMessage must be equal") .isEqualTo(expectedTaskExecution.getExitMessage()); assertThat(actualTaskExecution.getErrorMessage()).as("errorMessage must be equal") .isEqualTo(expectedTaskExecution.getErrorMessage()); assertThat(actualTaskExecution.getExternalExecutionId()).as("externalExecutionId must be equal") .isEqualTo(expectedTaskExecution.getExternalExecutionId()); assertThat(actualTaskExecution.getParentExecutionId()).as("parentExecutionId must be equal") .isEqualTo(expectedTaskExecution.getParentExecutionId()); if (expectedTaskExecution.getArguments() != null) { assertThat(actualTaskExecution.getArguments()).as("arguments should not be null").isNotNull(); assertThat(actualTaskExecution.getArguments().size()) .as("arguments result set count should match expected count") .isEqualTo(expectedTaskExecution.getArguments().size()); } else { assertThat(actualTaskExecution.getArguments()).as("arguments should be null").isNull(); } Set args = new HashSet<>(); for (String param : expectedTaskExecution.getArguments()) { args.add(param); } for (String arg : actualTaskExecution.getArguments()) { assertThat(args.contains(arg)).as("arg must exist in the repository").isTrue(); } } } ================================================ FILE: spring-cloud-task-core/src/test/resources/application.properties ================================================ logging.level.org.springframework.cloud.task=debug 
================================================ FILE: spring-cloud-task-core/src/test/resources/micrometer/pcf-scs-info.json ================================================ { "sso":[{ "name": "sso", "label": "sso", "plan": "notfree", "tags": ["configuration"], "credentials":{ "uri": "https://pivotal.io", "client_id": "fakeClientId", "client_secret": "fakeSecret", "access_token_uri": "token" } }]} ================================================ FILE: spring-cloud-task-dependencies/pom.xml ================================================ 4.0.0 spring-cloud-task-dependencies 5.0.2-SNAPSHOT pom Spring Cloud Task Dependencies Spring Cloud Task Dependencies spring-cloud-dependencies-parent org.springframework.cloud 5.0.2-SNAPSHOT org.springframework.cloud spring-cloud-starter-task ${project.version} org.springframework.cloud spring-cloud-task-core ${project.version} org.springframework.cloud spring-cloud-task-batch ${project.version} org.springframework.cloud spring-cloud-task-stream ${project.version} spring spring-snapshots Spring Snapshots https://repo.spring.io/snapshot true spring-milestones Spring Milestones https://repo.spring.io/milestone false spring-releases Spring Releases https://repo.spring.io/release false spring-snapshots Spring Snapshots https://repo.spring.io/snapshot true spring-milestones Spring Milestones https://repo.spring.io/milestone false ================================================ FILE: spring-cloud-task-integration-tests/pom.xml ================================================ spring-cloud-task-parent org.springframework.cloud 5.0.2-SNAPSHOT 4.0.0 Spring Cloud Task Integration Tests Tests the integration between Task/Batch Events and Stream Binders org.springframework.cloud spring-cloud-task-integration-tests 5.0.2-SNAPSHOT org.springframework.cloud spring-cloud-task-dependencies ${project.version} pom import org.springframework.cloud spring-cloud-task-stream test org.springframework.boot spring-boot-starter-test test 
org.springframework.cloud spring-cloud-starter-stream-rabbit test org.springframework.cloud spring-cloud-stream-binder-rabbit-test-support ${spring-cloud-stream-binder-rabbit.version} test org.springframework.batch spring-batch-core test org.springframework.cloud spring-cloud-task-batch com.h2database h2 test org.springframework.cloud spring-cloud-test-support ${spring-cloud-commons.version} test org.springframework.integration spring-integration-core test org.springframework.integration spring-integration-jdbc test org.testcontainers testcontainers test org.testcontainers testcontainers-junit-jupiter test org.testcontainers testcontainers-rabbitmq test org.testcontainers testcontainers-db2 test org.springframework.cloud spring-cloud-stream-test-binder ${spring-cloud-stream.version} test org.springframework.cloud spring-cloud-stream test org.springframework.integration spring-integration-test test org.testcontainers testcontainers-mariadb test org.mariadb.jdbc mariadb-java-client test org.springframework.boot spring-boot-starter-jdbc test maven-deploy-plugin true integrationTests org.apache.maven.plugins maven-surefire-plugin 2.22.2 false skipIntegrationTests true org.apache.maven.plugins maven-surefire-plugin 2.22.2 true ================================================ FILE: spring-cloud-task-integration-tests/src/test/java/configuration/JobConfiguration.java ================================================ /* * Copyright 2021-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. */ package configuration; import java.util.Arrays; import org.springframework.batch.core.job.Job; import org.springframework.batch.core.job.builder.JobBuilder; import org.springframework.batch.core.repository.JobRepository; import org.springframework.batch.core.scope.context.ChunkContext; import org.springframework.batch.core.step.Step; import org.springframework.batch.core.step.StepContribution; import org.springframework.batch.core.step.builder.StepBuilder; import org.springframework.batch.core.step.tasklet.Tasklet; import org.springframework.batch.infrastructure.item.Chunk; import org.springframework.batch.infrastructure.item.ItemProcessor; import org.springframework.batch.infrastructure.item.ItemWriter; import org.springframework.batch.infrastructure.item.support.ListItemReader; import org.springframework.batch.infrastructure.repeat.RepeatStatus; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.transaction.PlatformTransactionManager; /** * @author Glenn Renfro */ @Configuration @ConditionalOnProperty(prefix = "spring.cloud.task.test", name = "enable-job-configuration", havingValue = "true") public class JobConfiguration { private static final int DEFAULT_CHUNK_COUNT = 3; @Autowired private JobRepository jobRepository; @Autowired private PlatformTransactionManager transactionManager; @Bean public Job job() { return new JobBuilder("job", this.jobRepository).start(step1()).next(step2()).build(); } @Bean public Step step1() { return new StepBuilder("step1", this.jobRepository).tasklet(new Tasklet() { @Override public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { 
System.out.println("Executed"); return RepeatStatus.FINISHED; } }, transactionManager).build(); } @Bean public Step step2() { return new StepBuilder("step2", this.jobRepository) .chunk(DEFAULT_CHUNK_COUNT, transactionManager) .reader(new ListItemReader<>(Arrays.asList("1", "2", "3", "4", "5", "6"))) .processor(new ItemProcessor() { @Override public String process(String item) throws Exception { return String.valueOf(Integer.parseInt(item) * -1); } }) .writer(new ItemWriter() { @Override public void write(Chunk items) throws Exception { for (Object item : items) { System.out.println(">> " + item); } } }) .build(); } } ================================================ FILE: spring-cloud-task-integration-tests/src/test/java/configuration/JobSkipConfiguration.java ================================================ /* * Copyright 2016-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
 */

package configuration;

import org.springframework.batch.core.job.Job;
import org.springframework.batch.core.job.builder.JobBuilder;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.core.scope.context.ChunkContext;
import org.springframework.batch.core.step.Step;
import org.springframework.batch.core.step.StepContribution;
import org.springframework.batch.core.step.builder.StepBuilder;
import org.springframework.batch.core.step.tasklet.Tasklet;
import org.springframework.batch.infrastructure.item.ItemProcessor;
import org.springframework.batch.infrastructure.repeat.RepeatStatus;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.transaction.PlatformTransactionManager;

/**
 * Test fixture: a two-step job whose chunk step is fault tolerant and skips
 * {@link IllegalStateException}s thrown by the deliberately failing
 * {@code SkipItemReader}/{@code SkipItemWriter}, so the tests can observe
 * skip events. Only active when
 * {@code spring.cloud.task.test.enable-fail-job-configuration} is set.
 *
 * @author Glenn Renfro
 */
@Configuration
@ConditionalOnProperty(prefix = "spring.cloud.task.test", name = "enable-fail-job-configuration")
public class JobSkipConfiguration {

	@Autowired
	private JobRepository jobRepository;

	@Autowired
	private PlatformTransactionManager transactionManager;

	// Sequential job: trivial tasklet step, then the fault-tolerant chunk step.
	@Bean
	public Job job() {
		return new JobBuilder("job", this.jobRepository).start(step1()).next(step2()).build();
	}

	// Tasklet step that prints a marker line and finishes immediately.
	@Bean
	public Step step1() {
		return new StepBuilder("step1", this.jobRepository).tasklet(new Tasklet() {
			@Override
			public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception {
				System.out.println("Executed");
				return RepeatStatus.FINISHED;
			}
		}, transactionManager).build();
	}

	// Fault-tolerant chunk step: up to 100 IllegalStateExceptions (thrown twice
	// each by SkipItemReader and SkipItemWriter) are skipped instead of failing
	// the step. The processor negates each numeric-string item.
	// NOTE(review): ItemProcessor appears raw here; the generic type arguments
	// (likely <Object, String>, matching process(Object)) seem to have been
	// stripped by the extraction tool — confirm against the original file.
	@Bean
	public Step step2() {
		return new StepBuilder("step2", this.jobRepository).chunk(3, transactionManager)
			.faultTolerant()
			.skip(IllegalStateException.class)
			.skipLimit(100)
			.reader(new SkipItemReader())
			.processor(new ItemProcessor() {
				@Override
				public String process(Object item) throws Exception {
					return String.valueOf(Integer.parseInt((String) item) * -1);
				}
			})
			.writer(new SkipItemWriter())
			.build();
	}

}

================================================
FILE: spring-cloud-task-integration-tests/src/test/java/configuration/SkipItemReader.java
================================================
/*
 * Copyright 2016-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package configuration;

import org.springframework.batch.infrastructure.item.ItemReader;
import org.springframework.batch.infrastructure.item.NonTransientResourceException;
import org.springframework.batch.infrastructure.item.ParseException;
import org.springframework.batch.infrastructure.item.UnexpectedInputException;

/**
 * Reader that fails its first two read() calls with IllegalStateException
 * (exercising the step's skip logic), then yields a single item "1" and
 * signals end-of-input with null on the following call.
 *
 * @author Glenn Renfro
 */
public class SkipItemReader implements ItemReader {

	// Number of deliberate failures thrown so far; stops failing after 2.
	int failCount = 0;

	// Set after the single real item has been returned; triggers null (EOF).
	boolean finished = false;

	@Override
	public Object read() throws Exception, UnexpectedInputException, ParseException, NonTransientResourceException {
		String result = "1";
		if (this.failCount < 2) {
			this.failCount++;
			throw new IllegalStateException("Reader FOOBAR");
		}
		if (this.finished) {
			result = null;
		}
		this.finished = true;
		return result;
	}

}

================================================
FILE: spring-cloud-task-integration-tests/src/test/java/configuration/SkipItemWriter.java
================================================
/*
 * Copyright 2016-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package configuration;

import org.springframework.batch.infrastructure.item.Chunk;
import org.springframework.batch.infrastructure.item.ItemWriter;

/**
 * Writer that fails its first two write() calls with IllegalStateException
 * (exercising the step's skip logic), then writes each item to stdout.
 *
 * @author Glenn Renfro
 */
public class SkipItemWriter implements ItemWriter {

	// Number of deliberate failures thrown so far; stops failing after 2.
	int failCount = 0;

	@Override
	public void write(Chunk items) throws Exception {
		if (this.failCount < 2) {
			this.failCount++;
			throw new IllegalStateException("Writer FOOBAR");
		}
		for (Object item : items) {
			System.out.println(">> " + item);
		}
	}

}

================================================
FILE: spring-cloud-task-integration-tests/src/test/java/org/springframework/cloud/task/executionid/TaskStartApplication.java
================================================
/*
 * Copyright 2015-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.cloud.task.executionid;

import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.task.configuration.EnableTask;
import org.springframework.context.annotation.Bean;

/**
 * Minimal {@code @EnableTask} Boot application launched by the integration
 * tests (e.g. TaskStartTests); its only work is echoing its command-line
 * arguments to stdout before the task completes.
 *
 * @author Glenn Renfro
 */
@EnableTask
@SpringBootApplication
public class TaskStartApplication {

	public static void main(String[] args) {
		SpringApplication.run(TaskStartApplication.class, args);
	}

	// Prints "Test" followed by each argument; gives the task observable output.
	@Bean
	public CommandLineRunner testCommandLineRunner() {
		return new CommandLineRunner() {
			@Override
			public void run(String... strings) throws Exception {
				for (String s : strings) {
					System.out.println("Test" + s);
				}
			}
		};
	}

}

================================================
FILE: spring-cloud-task-integration-tests/src/test/java/org/springframework/cloud/task/executionid/TaskStartTests.java
================================================
/*
 * Copyright 2016-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.cloud.task.executionid;

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;

import javax.sql.DataSource;

import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.testcontainers.junit.jupiter.Container;
import org.testcontainers.junit.jupiter.Testcontainers;
import org.testcontainers.mariadb.MariaDBContainer;
import org.testcontainers.utility.DockerImageName;

import org.springframework.boot.SpringApplication;
import org.springframework.cloud.task.repository.TaskExecution;
import org.springframework.cloud.task.repository.TaskExplorer;
import org.springframework.cloud.task.repository.TaskRepository;
import org.springframework.cloud.task.repository.support.SimpleTaskExplorer;
import org.springframework.cloud.task.repository.support.SimpleTaskRepository;
import org.springframework.cloud.task.repository.support.TaskExecutionDaoFactoryBean;
import org.springframework.context.ApplicationContextException;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.core.env.ConfigurableEnvironment;
import org.springframework.core.env.MapPropertySource;
import org.springframework.core.env.MutablePropertySources;
import org.springframework.core.env.StandardEnvironment;
import org.springframework.core.io.ClassPathResource;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.simple.SimpleJdbcInsert;
import org.springframework.jdbc.datasource.DriverManagerDataSource;
import org.springframework.jdbc.datasource.init.DataSourceInitializer;
import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator;

import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;

// Integration tests for the spring.cloud.task.executionid property: the test
// pre-creates TASK_EXECUTION rows directly through SimpleTaskRepository against
// a MariaDB Testcontainer, then boots TaskStartApplication pointed at that
// execution id and verifies the task picks up (or rejects) the existing row.
// NOTE(review): several raw types below (Map, Page, MariaDBContainer) look like
// generic type arguments stripped by the extraction tool — e.g.
// Page<TaskExecution> is required for .getExitCode() to compile. Confirm
// against the original file.
@Testcontainers
public class TaskStartTests {

	// Polling cadence (ms) for waitForDBToBePopulated.
	private final static int WAIT_INTERVAL = 500;

	// Upper bound (ms) on waiting for the task tables to be populated.
	private final static int MAX_WAIT_TIME = 5000;

	private final static String TASK_NAME = "TASK_LAUNCHER_SINK_TEST";

	private static final DockerImageName MARIADB_IMAGE = DockerImageName.parse("mariadb:10.9.3");

	/**
	 * Provide mariadb test container for tests.
	 */
	@Container
	public static MariaDBContainer mariaDBContainer = new MariaDBContainer(MARIADB_IMAGE);

	private DataSource dataSource;

	// NOTE(review): populated in setup() but never read afterwards —
	// getTaskApplication() builds its own property map. Possibly dead state;
	// verify before removing.
	private Map properties;

	private TaskExplorer taskExplorer;

	private TaskRepository taskRepository;

	private ConfigurableApplicationContext applicationContext;

	@AfterEach
	public void tearDown() {
		if (this.applicationContext != null && this.applicationContext.isActive()) {
			this.applicationContext.close();
		}
	}

	// Rebuilds the DataSource/explorer/repository against the container, drops
	// every task/batch table left by a previous test, then re-creates the task
	// schema from schema-mariadb.sql so each test starts from a clean slate.
	@BeforeEach
	public void setup() {
		DriverManagerDataSource dataSource = new DriverManagerDataSource();
		dataSource.setDriverClassName(mariaDBContainer.getDriverClassName());
		dataSource.setUrl(mariaDBContainer.getJdbcUrl());
		dataSource.setUsername(mariaDBContainer.getUsername());
		dataSource.setPassword(mariaDBContainer.getPassword());
		this.dataSource = dataSource;
		TaskExecutionDaoFactoryBean factoryBean = new TaskExecutionDaoFactoryBean(dataSource);
		this.taskExplorer = new SimpleTaskExplorer(factoryBean);
		this.taskRepository = new SimpleTaskRepository(factoryBean);
		this.properties = new HashMap<>();
		this.properties.put("spring.datasource.url", mariaDBContainer.getJdbcUrl());
		this.properties.put("spring.datasource.username", mariaDBContainer.getUsername());
		this.properties.put("spring.datasource.password", mariaDBContainer.getPassword());
		this.properties.put("spring.datasource.driverClassName", mariaDBContainer.getDriverClassName());
		this.properties.put("spring.application.name", TASK_NAME);
		this.properties.put("spring.cloud.task.initialize-enabled", "false");
		JdbcTemplate template = new JdbcTemplate(this.dataSource);
		// Drop order matters: child tables (params, batch-task link) before the
		// tables they reference.
		template.execute("DROP TABLE IF EXISTS TASK_TASK_BATCH");
		template.execute("DROP TABLE IF EXISTS TASK_SEQ");
		template.execute("DROP TABLE IF EXISTS TASK_EXECUTION_PARAMS");
		template.execute("DROP TABLE IF EXISTS TASK_EXECUTION");
		template.execute("DROP TABLE IF EXISTS TASK_LOCK");
		template.execute("DROP TABLE IF EXISTS BATCH_STEP_EXECUTION_SEQ");
		template.execute("DROP TABLE IF EXISTS BATCH_STEP_EXECUTION_CONTEXT");
		template.execute("DROP TABLE IF EXISTS BATCH_STEP_EXECUTION");
		template.execute("DROP TABLE IF EXISTS BATCH_JOB_SEQ");
		template.execute("DROP TABLE IF EXISTS BATCH_JOB_EXECUTION_SEQ");
		template.execute("DROP TABLE IF EXISTS BATCH_JOB_EXECUTION_PARAMS");
		template.execute("DROP TABLE IF EXISTS BATCH_JOB_EXECUTION_CONTEXT");
		template.execute("DROP TABLE IF EXISTS BATCH_JOB_EXECUTION");
		template.execute("DROP TABLE IF EXISTS BATCH_JOB_INSTANCE");
		template.execute("DROP SEQUENCE IF EXISTS TASK_SEQ");
		DataSourceInitializer initializer = new DataSourceInitializer();
		initializer.setDataSource(this.dataSource);
		ResourceDatabasePopulator databasePopulator = new ResourceDatabasePopulator();
		databasePopulator.addScript(new ClassPathResource("/org/springframework/cloud/task/schema-mariadb.sql"));
		initializer.setDatabasePopulator(databasePopulator);
		initializer.afterPropertiesSet();
	}

	// Task launched with executionid=1 reuses the pre-created row instead of
	// inserting a second one.
	@Test
	public void testWithGeneratedTaskExecution() throws Exception {
		this.taskRepository.createTaskExecution();
		assertThat(this.taskExplorer.getTaskExecutionCount()).as("Only one row is expected").isEqualTo(1);
		this.applicationContext = getTaskApplication(1).run(new String[0]);
		assertThat(waitForDBToBePopulated()).isTrue();
		Page taskExecutions = this.taskExplorer.findAll(PageRequest.of(0, 10));
		TaskExecution te = taskExecutions.iterator().next();
		assertThat(taskExecutions.getTotalElements()).as("Only one row is expected").isEqualTo(1);
		assertThat(taskExecutions.iterator().next().getExitCode().intValue()).as("return code should be 0")
			.isEqualTo(0);
	}

	// A pre-assigned task name is replaced by the launched application's own
	// name once the task runs against the same execution id.
	// NOTE(review): the final expectation is "batchEvents" even though this
	// test's property map sets spring.application.name to TASK_NAME — the name
	// presumably comes from the launched app's own configuration; verify.
	@Test
	public void testWithGeneratedTaskExecutionWithName() throws Exception {
		final String TASK_EXECUTION_NAME = "PRE-EXECUTION-TEST-NAME";
		this.taskRepository.createTaskExecution(TASK_EXECUTION_NAME);
		assertThat(this.taskExplorer.getTaskExecutionCount()).as("Only one row is expected").isEqualTo(1);
		assertThat(this.taskExplorer.getTaskExecution(1).getTaskName()).isEqualTo(TASK_EXECUTION_NAME);
		this.applicationContext = getTaskApplication(1).run(new String[0]);
		assertThat(waitForDBToBePopulated()).isTrue();
		Page taskExecutions = this.taskExplorer.findAll(PageRequest.of(0, 10));
		TaskExecution te = taskExecutions.iterator().next();
		assertThat(taskExecutions.getTotalElements()).as("Only one row is expected").isEqualTo(1);
		assertThat(taskExecutions.iterator().next().getExitCode().intValue()).as("return code should be 0")
			.isEqualTo(0);
		assertThat(this.taskExplorer.getTaskExecution(1).getTaskName()).isEqualTo("batchEvents");
	}

	// A pre-existing start time on the execution row survives the task run
	// (the sleep guarantees the row's start time predates the launch).
	@Test
	public void testWithGeneratedTaskExecutionWithExistingDate() throws Exception {
		final String TASK_EXECUTION_NAME = "PRE-EXECUTION-TEST-NAME";
		LocalDateTime startDate = LocalDateTime.now();
		Thread.sleep(500);
		TaskExecution taskExecution = new TaskExecution(1, 0, TASK_EXECUTION_NAME, startDate, LocalDateTime.now(),
				"foo", Collections.emptyList(), "foo", "bar", null);
		this.taskRepository.createTaskExecution(taskExecution);
		assertThat(this.taskExplorer.getTaskExecutionCount()).as("Only one row is expected").isEqualTo(1);
		this.applicationContext = getTaskApplication(1).run(new String[0]);
		assertThat(waitForDBToBePopulated()).isTrue();
		Page taskExecutions = this.taskExplorer.findAll(PageRequest.of(0, 10));
		assertThat(taskExecutions.getTotalElements()).as("Only one row is expected").isEqualTo(1);
		assertThat(taskExecutions.iterator().next().getExitCode().intValue()).as("return code should be 0")
			.isEqualTo(0);
		assertThat(this.taskExplorer.getTaskExecution(1).getStartTime().isEqual(startDate)).isTrue();
	}

	// Launching against an execution id that has no row fails app startup.
	@Test
	public void testWithNoTaskExecution() throws Exception {
		assertThatExceptionOfType(ApplicationContextException.class).isThrownBy(() -> {
			this.applicationContext = getTaskApplication(55).run(new String[0]);
		});
	}

	// Launching against an already-completed execution also fails startup.
	@Test
	public void testCompletedTaskExecution() throws Exception {
		this.taskRepository.createTaskExecution();
		assertThat(this.taskExplorer.getTaskExecutionCount()).as("Only one row is expected").isEqualTo(1);
		this.taskRepository.completeTaskExecution(1, 0, LocalDateTime.now(), "");
		assertThatExceptionOfType(ApplicationContextException.class).isThrownBy(() -> {
			this.applicationContext = getTaskApplication(1).run(new String[0]);
		});
	}

	// With single-instance-enabled, a held TASK_LOCK row for the task name
	// causes startup to fail with the "already running" message.
	@Test
	public void testDuplicateTaskExecutionWithSingleInstanceEnabled() throws Exception {
		String[] params = { "--spring.cloud.task.single-instance-enabled=true", "--spring.cloud.task.name=foo" };
		boolean testFailed = false;
		try {
			this.taskRepository.createTaskExecution();
			assertThat(this.taskExplorer.getTaskExecutionCount()).as("Only one row is expected").isEqualTo(1);
			enableLock("foo");
			getTaskApplication(1).run(params);
		}
		catch (ApplicationContextException taskException) {
			assertThat(taskException.getCause().getMessage()).isEqualTo("Failed to process "
					+ "@BeforeTask or @AfterTask annotation because: Task with name \"foo\" is already running.");
			testFailed = true;
		}
		assertThat(testFailed)
			.as("Expected TaskExecutionException for because of " + "single-instance-enabled is enabled")
			.isTrue();
	}

	// Without single-instance-enabled, a held lock does not block the launch.
	@Test
	public void testDuplicateTaskExecutionWithSingleInstanceDisabled() throws Exception {
		this.taskRepository.createTaskExecution();
		TaskExecution execution = this.taskRepository.createTaskExecution();
		this.taskRepository.startTaskExecution(execution.getExecutionId(), "bar", LocalDateTime.now(),
				new ArrayList<>(), "");
		String[] params = { "--spring.cloud.task.name=bar" };
		enableLock("bar");
		this.applicationContext = getTaskApplication(1).run(params);
		assertThat(waitForDBToBePopulated()).isTrue();
	}

	// Builds a TaskStartApplication SpringApplication pointed at the container
	// database and at the given pre-existing execution id.
	private SpringApplication getTaskApplication(Integer executionId) {
		SpringApplication myapp = new SpringApplication(TaskStartApplication.class);
		Map myMap = new HashMap<>();
		ConfigurableEnvironment environment = new StandardEnvironment();
		MutablePropertySources propertySources = environment.getPropertySources();
		myMap.put("spring.cloud.task.executionid", executionId);
		myMap.put("spring.datasource.url", mariaDBContainer.getJdbcUrl());
		myMap.put("spring.datasource.username", mariaDBContainer.getUsername());
		myMap.put("spring.datasource.password", mariaDBContainer.getPassword());
		myMap.put("spring.datasource.driverClassName", mariaDBContainer.getDriverClassName());
		propertySources.addFirst(new MapPropertySource("EnvrionmentTestPropsource", myMap));
		myapp.setEnvironment(environment);
		return myapp;
	}

	// True when the TASK_EXECUTION table exists in the container database.
	private boolean tableExists() throws SQLException {
		boolean result;
		try (Connection conn = this.dataSource.getConnection();
				ResultSet res = conn.getMetaData().getTables(null, null, "TASK_EXECUTION", new String[] { "TABLE" })) {
			result = res.next();
		}
		return result;
	}

	// Polls (WAIT_INTERVAL ms apart, up to MAX_WAIT_TIME) until TASK_EXECUTION
	// exists and holds at least one row; false on timeout.
	private boolean waitForDBToBePopulated() throws Exception {
		boolean isDbPopulated = false;
		for (int waitTime = 0; waitTime <= MAX_WAIT_TIME; waitTime += WAIT_INTERVAL) {
			Thread.sleep(WAIT_INTERVAL);
			if (tableExists() && this.taskExplorer.getTaskExecutionCount() > 0) {
				isDbPopulated = true;
				break;
			}
		}
		return isDbPopulated;
	}

	// Inserts a TASK_LOCK row for the given task name, simulating a task that is
	// already running (key is the name's UUID, matching Spring Integration's
	// JdbcLockRegistry scheme).
	private void enableLock(String lockKey) {
		SimpleJdbcInsert taskLockInsert = new SimpleJdbcInsert(this.dataSource).withTableName("TASK_LOCK");
		Map taskLockParams = new HashMap<>();
		taskLockParams.put("LOCK_KEY", UUID.nameUUIDFromBytes(lockKey.getBytes()).toString());
		taskLockParams.put("REGION", "DEFAULT");
		taskLockParams.put("CLIENT_ID", "aClientID");
		taskLockParams.put("CREATED_DATE", LocalDateTime.now());
		taskLockInsert.execute(taskLockParams);
	}

}

================================================
FILE: spring-cloud-task-integration-tests/src/test/java/org/springframework/cloud/task/initializer/TaskInitializerTests.java
================================================
/*
 * Copyright 2016-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.cloud.task.initializer;

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;

import javax.sql.DataSource;

import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.testcontainers.junit.jupiter.Container;
import org.testcontainers.junit.jupiter.Testcontainers;
import org.testcontainers.mariadb.MariaDBContainer;
import org.testcontainers.utility.DockerImageName;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.SpringApplication;
import org.springframework.cloud.task.executionid.TaskStartApplication;
import org.springframework.cloud.task.repository.TaskExecution;
import org.springframework.cloud.task.repository.TaskExplorer;
import org.springframework.cloud.task.repository.support.SimpleTaskExplorer;
import org.springframework.cloud.task.repository.support.TaskExecutionDaoFactoryBean;
import org.springframework.context.ApplicationContextException;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.core.env.ConfigurableEnvironment;
import org.springframework.core.env.MapPropertySource;
import org.springframework.core.env.MutablePropertySources;
import org.springframework.core.env.StandardEnvironment;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.datasource.DriverManagerDataSource;

import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;

// Integration tests for spring.cloud.task.initialize-enabled (and the legacy
// spring.cloud.task.initialize.enable spelling): with initialization disabled
// the app fails to start against an empty database; with it enabled the task
// schema is created and one execution row is recorded.
// NOTE(review): raw types (Map, Page, MariaDBContainer) here look like generic
// type arguments stripped by the extraction tool — confirm against the
// original file.
@Testcontainers
public class TaskInitializerTests {

	// Polling cadence (ms) for waitForDBToBePopulated.
	private final static int WAIT_INTERVAL = 500;

	// Upper bound (ms) on waiting for the task tables to be populated.
	private final static int MAX_WAIT_TIME = 5000;

	private DataSource dataSource;

	private TaskExplorer taskExplorer;

	private ConfigurableApplicationContext applicationContext;

	private static final DockerImageName MARIADB_IMAGE = DockerImageName.parse("mariadb:10.9.3");

	/**
	 * Provide mariadb test container for tests.
	 */
	@Container
	public static MariaDBContainer mariaDBContainer = new MariaDBContainer(MARIADB_IMAGE);

	@AfterEach
	public void tearDown() {
		if (this.applicationContext != null && this.applicationContext.isActive()) {
			this.applicationContext.close();
		}
	}

	// NOTE(review): @Autowired on a plain JUnit test class — setup() below
	// overwrites the same fields unconditionally, so this setter appears to be
	// dead code; verify before removing.
	@Autowired
	public void setDataSource(DataSource dataSource) {
		this.dataSource = dataSource;
		TaskExecutionDaoFactoryBean factoryBean = new TaskExecutionDaoFactoryBean(dataSource);
		this.taskExplorer = new SimpleTaskExplorer(factoryBean);
	}

	// Rebuilds the DataSource/explorer against the container and drops every
	// task/batch table so each test starts from an empty database (unlike
	// TaskStartTests, the schema is deliberately NOT re-created here — creating
	// it is exactly what these tests assert on).
	@BeforeEach
	public void setup() {
		DriverManagerDataSource dataSource = new DriverManagerDataSource();
		dataSource.setDriverClassName(mariaDBContainer.getDriverClassName());
		dataSource.setUrl(mariaDBContainer.getJdbcUrl());
		dataSource.setUsername(mariaDBContainer.getUsername());
		dataSource.setPassword(mariaDBContainer.getPassword());
		this.dataSource = dataSource;
		TaskExecutionDaoFactoryBean factoryBean = new TaskExecutionDaoFactoryBean(dataSource);
		this.taskExplorer = new SimpleTaskExplorer(factoryBean);
		JdbcTemplate template = new JdbcTemplate(this.dataSource);
		// Drop order matters: child tables (params, batch-task link) before the
		// tables they reference.
		template.execute("DROP TABLE IF EXISTS TASK_TASK_BATCH");
		template.execute("DROP TABLE IF EXISTS TASK_SEQ");
		template.execute("DROP TABLE IF EXISTS TASK_EXECUTION_PARAMS");
		template.execute("DROP TABLE IF EXISTS TASK_EXECUTION");
		template.execute("DROP TABLE IF EXISTS TASK_LOCK");
		template.execute("DROP TABLE IF EXISTS BATCH_STEP_EXECUTION_SEQ");
		template.execute("DROP TABLE IF EXISTS BATCH_STEP_EXECUTION_CONTEXT");
		template.execute("DROP TABLE IF EXISTS BATCH_STEP_EXECUTION");
		template.execute("DROP TABLE IF EXISTS BATCH_JOB_SEQ");
		template.execute("DROP TABLE IF EXISTS BATCH_JOB_EXECUTION_SEQ");
		template.execute("DROP TABLE IF EXISTS BATCH_JOB_EXECUTION_PARAMS");
		template.execute("DROP TABLE IF EXISTS BATCH_JOB_EXECUTION_CONTEXT");
		template.execute("DROP TABLE IF EXISTS BATCH_JOB_EXECUTION");
		template.execute("DROP TABLE IF EXISTS BATCH_JOB_INSTANCE");
		template.execute("DROP SEQUENCE IF EXISTS TASK_SEQ");
	}

	// Initialization disabled + empty schema => startup failure.
	@Test
	public void testNotInitialized() throws Exception {
		SpringApplication myapp = getTaskApplication();
		String[] properties = { "--spring.cloud.task.initialize-enabled=false" };
		assertThatExceptionOfType(ApplicationContextException.class).isThrownBy(() -> {
			this.applicationContext = myapp.run(properties);
		});
	}

	// Initialization enabled => schema created and one successful execution row.
	@Test
	public void testWithInitialized() throws Exception {
		SpringApplication myapp = getTaskApplication();
		String[] properties = { "--spring.cloud.task.initialize-enabled=true" };
		this.applicationContext = myapp.run(properties);
		assertThat(waitForDBToBePopulated()).isTrue();
		Page taskExecutions = this.taskExplorer.findAll(PageRequest.of(0, 10));
		TaskExecution te = taskExecutions.iterator().next();
		assertThat(taskExecutions.getTotalElements()).as("Only one row is expected").isEqualTo(1);
		assertThat(taskExecutions.iterator().next().getExitCode().intValue()).as("return code should be 0")
			.isEqualTo(0);
	}

	// Same pair of tests for the deprecated property spelling
	// spring.cloud.task.initialize.enable.
	@Test
	public void testNotInitializedOriginalProperty() throws Exception {
		SpringApplication myapp = getTaskApplication();
		String[] properties = { "--spring.cloud.task.initialize.enable=false" };
		assertThatExceptionOfType(ApplicationContextException.class).isThrownBy(() -> {
			this.applicationContext = myapp.run(properties);
		});
	}

	@Test
	public void testWithInitializedOriginalProperty() throws Exception {
		SpringApplication myapp = getTaskApplication();
		String[] properties = { "--spring.cloud.task.initialize.enable=true" };
		this.applicationContext = myapp.run(properties);
		assertThat(waitForDBToBePopulated()).isTrue();
		Page taskExecutions = this.taskExplorer.findAll(PageRequest.of(0, 10));
		TaskExecution te = taskExecutions.iterator().next();
		assertThat(taskExecutions.getTotalElements()).as("Only one row is expected").isEqualTo(1);
		assertThat(taskExecutions.iterator().next().getExitCode().intValue()).as("return code should be 0")
			.isEqualTo(0);
	}

	// True when the TASK_EXECUTION table exists in the container database.
	private boolean tableExists() throws SQLException {
		boolean result;
		try (Connection conn = this.dataSource.getConnection();
				ResultSet res = conn.getMetaData().getTables(null, null, "TASK_EXECUTION", new String[] { "TABLE" })) {
			result = res.next();
		}
		return result;
	}

	// Polls (WAIT_INTERVAL ms apart, up to MAX_WAIT_TIME) until TASK_EXECUTION
	// exists and holds at least one row; false on timeout.
	private boolean waitForDBToBePopulated() throws Exception {
		boolean isDbPopulated = false;
		for (int waitTime = 0; waitTime <= MAX_WAIT_TIME; waitTime += WAIT_INTERVAL) {
			Thread.sleep(WAIT_INTERVAL);
			if (tableExists() && this.taskExplorer.getTaskExecutionCount() > 0) {
				isDbPopulated = true;
				break;
			}
		}
		return isDbPopulated;
	}

	// Builds a TaskStartApplication SpringApplication pointed at the container
	// database (no execution id — the task creates its own row).
	private SpringApplication getTaskApplication() {
		SpringApplication myapp = new SpringApplication(TaskStartApplication.class);
		Map myMap = new HashMap<>();
		ConfigurableEnvironment environment = new StandardEnvironment();
		MutablePropertySources propertySources = environment.getPropertySources();
		myMap.put("spring.datasource.url", mariaDBContainer.getJdbcUrl());
		myMap.put("spring.datasource.username", mariaDBContainer.getUsername());
		myMap.put("spring.datasource.password", mariaDBContainer.getPassword());
		myMap.put("spring.datasource.driverClassName", mariaDBContainer.getDriverClassName());
		propertySources.addFirst(new MapPropertySource("EnvrionmentTestPropsource", myMap));
		myapp.setEnvironment(environment);
		return myapp;
	}

}

================================================
FILE: spring-cloud-task-integration-tests/src/test/java/org/springframework/cloud/task/listener/BatchExecutionEventTests.java
================================================
/*
 * Copyright 2016-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.cloud.task.listener;

import java.util.ArrayList;
import java.util.List;
import java.util.UUID;

import configuration.JobConfiguration;
import configuration.JobSkipConfiguration;
import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import tools.jackson.databind.DeserializationFeature;
import tools.jackson.databind.ObjectMapper;
import tools.jackson.databind.json.JsonMapper;

import org.springframework.boot.WebApplicationType;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.builder.SpringApplicationBuilder;
import org.springframework.cloud.stream.binder.test.OutputDestination;
import org.springframework.cloud.stream.binder.test.TestChannelBinderConfiguration;
import org.springframework.cloud.task.batch.listener.support.JobExecutionEvent;
import org.springframework.cloud.task.batch.listener.support.StepExecutionEvent;
import org.springframework.cloud.task.configuration.EnableTask;
import org.springframework.context.ConfigurableApplicationContext;
import
org.springframework.context.annotation.Import; import org.springframework.messaging.Message; import static org.assertj.core.api.Assertions.assertThat; /** * @author Glenn Renfro */ public class BatchExecutionEventTests { private static final String TASK_NAME = "taskEventTest"; private ObjectMapper objectMapper; private ConfigurableApplicationContext applicationContext; @BeforeEach public void setup() { objectMapper = JsonMapper.builder().disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES).build(); } @AfterEach public void tearDown() { if (this.applicationContext != null && this.applicationContext.isActive()) { this.applicationContext.close(); } } @Test public void testContext() { this.applicationContext = new SpringApplicationBuilder() .sources(TestChannelBinderConfiguration.getCompleteConfiguration(BatchEventsApplication.class)) .web(WebApplicationType.NONE) .build() .run(getCommandLineParams("--spring.cloud.stream.bindings.job-execution-events.destination=bazbar")); assertThat(this.applicationContext.getBean("jobExecutionEventsListener")).isNotNull(); assertThat(this.applicationContext.getBean("stepExecutionEventsListener")).isNotNull(); assertThat(this.applicationContext.getBean("chunkEventsListener")).isNotNull(); assertThat(this.applicationContext.getBean("itemReadEventsListener")).isNotNull(); assertThat(this.applicationContext.getBean("itemWriteEventsListener")).isNotNull(); assertThat(this.applicationContext.getBean("itemProcessEventsListener")).isNotNull(); assertThat(this.applicationContext.getBean("skipEventsListener")).isNotNull(); } @Test public void testJobEventListener() throws Exception { List> result = testListener( "--spring.cloud.task.batch.events.jobExecutionEventBindingName=foobar", "foobar", 1); JobExecutionEvent jobExecutionEvent = this.objectMapper.readValue(result.get(0).getPayload(), JobExecutionEvent.class); Assertions.assertThat(jobExecutionEvent.getJobInstance().getJobName()) .isEqualTo("job") .as("Job name should be job"); } 
@Test public void testStepEventListener() throws Exception { final String bindingName = "step-execution-foobar"; List> result = testListener( "--spring.cloud.task.batch.events.stepExecutionEventBindingName=" + bindingName, bindingName, 4); int stepOneCount = 0; int stepTwoCount = 0; for (int i = 0; i < 4; i++) { StepExecutionEvent stepExecutionEvent = this.objectMapper.readValue(result.get(i).getPayload(), StepExecutionEvent.class); if (stepExecutionEvent.getStepName().equals("step1")) { stepOneCount++; } if (stepExecutionEvent.getStepName().equals("step2")) { stepTwoCount++; } } assertThat(stepOneCount).as("the number of step1 events did not match").isEqualTo(2); assertThat(stepTwoCount).as("the number of step2 events did not match").isEqualTo(2); } @Test public void testItemProcessEventListener() { final String bindingName = "item-execution-foobar"; List> result = testListener( "--spring.cloud.task.batch.events.itemProcessEventBindingName=" + bindingName, bindingName, 1); String value = new String(result.get(0).getPayload()); assertThat(value).isEqualTo("item did not equal result after processing"); } @Test public void testChunkListener() { final String bindingName = "chunk-events-foobar"; List> result = testListener( "--spring.cloud.task.batch.events.chunkEventBindingName=" + bindingName, bindingName, 2); String value = new String(result.get(0).getPayload()); assertThat(value).isEqualTo("Before Chunk Processing"); value = new String(result.get(1).getPayload()); assertThat(value).isEqualTo("After Chunk Processing"); } @Test public void testWriteListener() { final String bindingName = "item-write-events-foobar"; List> result = testListener( "--spring.cloud.task.batch.events.itemWriteEventBindingName=" + bindingName, bindingName, 2); String value = new String(result.get(0).getPayload()); assertThat(value).isEqualTo("3 items to be written."); value = new String(result.get(1).getPayload()); assertThat(value).isEqualTo("3 items have been written."); } private String[] 
getCommandLineParams(String sinkChannelParam) { return getCommandLineParams(sinkChannelParam, true); } private String[] getCommandLineParams(String sinkChannelParam, boolean enableFailJobConfig) { String jobConfig = enableFailJobConfig ? "--spring.cloud.task.test.enable-job-configuration=true" : "--spring.cloud.task.test.enable-fail-job-configuration=true"; return new String[] { "--spring.cloud.task.closecontext_enable=false", "--spring.cloud.task.name=" + TASK_NAME, "--spring.main.web-environment=false", "--spring.cloud.stream.defaultBinder=rabbit", "--spring.cloud.stream.bindings.task-events.destination=test", jobConfig, "foo=" + UUID.randomUUID(), sinkChannelParam }; } private List> testListener(String channelBinding, String bindingName, int numberToRead) { return testListenerForApp(channelBinding, bindingName, numberToRead, BatchEventsApplication.class, true); } private List> testListenerSkip(String channelBinding, String bindingName, int numberToRead) { return testListenerForApp(channelBinding, bindingName, numberToRead, BatchSkipEventsApplication.class, false); } private List> testListenerForApp(String channelBinding, String bindingName, int numberToRead, Class clazz, boolean enableFailJobConfig) { List> results = new ArrayList<>(); this.applicationContext = new SpringApplicationBuilder() .sources(TestChannelBinderConfiguration.getCompleteConfiguration(clazz)) .web(WebApplicationType.NONE) .build() .run(getCommandLineParams(channelBinding, enableFailJobConfig)); OutputDestination target = this.applicationContext.getBean(OutputDestination.class); for (int i = 0; i < numberToRead; i++) { results.add(target.receive(10000, bindingName)); } return results; } @Test public void testItemReadListener() { final String bindingName = "item-read-events-foobar"; List> result = testListenerSkip( "--spring.cloud.task.batch.events.itemReadEventBindingName=" + bindingName, bindingName, 1); String exceptionMessage = new String(result.get(0).getPayload()); 
assertThat(exceptionMessage).isEqualTo("Exception while item was being read"); } @Test public void testSkipEventListener() { final String SKIPPING_READ_MESSAGE = "Skipped when reading."; final String SKIPPING_WRITE_CONTENT = "-1"; final String bindingName = "skip-event-foobar"; List> result = testListenerSkip( "--spring.cloud.task.batch.events.skipEventBindingName=" + bindingName, bindingName, 3); int readSkipCount = 0; int writeSkipCount = 0; for (int i = 0; i < 3; i++) { String exceptionMessage = new String(result.get(i).getPayload()); if (exceptionMessage.equals(SKIPPING_READ_MESSAGE)) { readSkipCount++; } if (exceptionMessage.equals(SKIPPING_WRITE_CONTENT)) { writeSkipCount++; } } assertThat(readSkipCount).as("the number of read skip events did not match").isEqualTo(2); assertThat(writeSkipCount).as("the number of write skip events did not match").isEqualTo(1); } @SpringBootApplication @EnableTask @Import(JobConfiguration.class) public static class BatchEventsApplication { } @SpringBootApplication @EnableTask @Import(JobSkipConfiguration.class) public static class BatchSkipEventsApplication { } } ================================================ FILE: spring-cloud-task-integration-tests/src/test/java/org/springframework/cloud/task/listener/TaskEventTests.java ================================================ /* * Copyright 2016-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.springframework.cloud.task.listener; import java.util.ArrayList; import java.util.List; import java.util.UUID; import org.assertj.core.api.Assertions; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import tools.jackson.databind.DeserializationFeature; import tools.jackson.databind.ObjectMapper; import tools.jackson.databind.json.JsonMapper; import org.springframework.boot.WebApplicationType; import org.springframework.boot.builder.SpringApplicationBuilder; import org.springframework.cloud.stream.binder.test.OutputDestination; import org.springframework.cloud.stream.binder.test.TestChannelBinderConfiguration; import org.springframework.cloud.task.configuration.EnableTask; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.context.ConfigurableApplicationContext; import org.springframework.context.annotation.Configuration; import org.springframework.messaging.Message; /** * @author Michael Minella * @author Ilayaperumal Gopinathan */ public class TaskEventTests { private static final String TASK_NAME = "taskEventTest"; private ObjectMapper objectMapper; private ConfigurableApplicationContext applicationContext; @BeforeEach public void setup() { objectMapper = JsonMapper.builder().disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES).build(); } @AfterEach public void tearDown() { if (this.applicationContext != null && this.applicationContext.isActive()) { this.applicationContext.close(); } } @Test public void testTaskEventListener() throws Exception { List> result = testListener( "--spring.cloud.task.batch.events.itemWriteEventBindingName=task-events", "task-events", 2); TaskExecution taskExecution = this.objectMapper.readValue(result.get(0).getPayload(), TaskExecution.class); Assertions.assertThat(taskExecution.getTaskName()) .isEqualTo(TASK_NAME) .as(String.format("Task name should be '%s'", TASK_NAME)); taskExecution = 
this.objectMapper.readValue(result.get(1).getPayload(), TaskExecution.class); Assertions.assertThat(taskExecution.getTaskName()) .isEqualTo(TASK_NAME) .as(String.format("Task name should be '%s'", TASK_NAME)); } private List> testListener(String channelBinding, String bindingName, int numberToRead) { List> results = new ArrayList<>(); this.applicationContext = new SpringApplicationBuilder() .sources(TestChannelBinderConfiguration .getCompleteConfiguration(BatchExecutionEventTests.BatchEventsApplication.class)) .web(WebApplicationType.NONE) .build() .run(getCommandLineParams(channelBinding)); OutputDestination target = this.applicationContext.getBean(OutputDestination.class); for (int i = 0; i < numberToRead; i++) { results.add(target.receive(10000, bindingName)); } return results; } private String[] getCommandLineParams(String sinkChannelParam) { return new String[] { "--spring.cloud.task.closecontext_enable=false", "--spring.cloud.task.name=" + TASK_NAME, "--spring.main.web-environment=false", "--spring.cloud.stream.defaultBinder=rabbit", "foo=" + UUID.randomUUID(), sinkChannelParam }; } @EnableTask @Configuration public static class TaskEventsConfiguration { } } ================================================ FILE: spring-cloud-task-integration-tests/src/test/resources/application.properties ================================================ logging.level.org.springframework.cloud.task=DEBUG logging.level.org.springframework.cloud.stream=DEBUG spring.application.name=batchEvents ================================================ FILE: spring-cloud-task-integration-tests/src/test/resources/org/springframework/cloud/task/listener/chunk-events-sink-channel.properties ================================================ # # Copyright 2016-2019 the original author or authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # spring.cloud.stream.bindings.input.destination=chunk-events-foobar ================================================ FILE: spring-cloud-task-integration-tests/src/test/resources/org/springframework/cloud/task/listener/item-process-sink-channel.properties ================================================ # # Copyright 2016-2019 the original author or authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # spring.cloud.stream.bindings.input.destination=item-process-foobar ================================================ FILE: spring-cloud-task-integration-tests/src/test/resources/org/springframework/cloud/task/listener/item-read-events-sink-channel.properties ================================================ # # Copyright 2016-2019 the original author or authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # spring.cloud.stream.bindings.input.destination=item-read-events-foobar ================================================ FILE: spring-cloud-task-integration-tests/src/test/resources/org/springframework/cloud/task/listener/item-write-events-sink-channel.properties ================================================ # # Copyright 2016-2019 the original author or authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # spring.cloud.stream.bindings.input.destination=item-write-events-foobar ================================================ FILE: spring-cloud-task-integration-tests/src/test/resources/org/springframework/cloud/task/listener/job-execution-sink-channel.properties ================================================ # # Copyright 2016-2019 the original author or authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # spring.cloud.stream.bindings.input.destination=foobar ================================================ FILE: spring-cloud-task-integration-tests/src/test/resources/org/springframework/cloud/task/listener/sink-channel.properties ================================================ # # Copyright 2016-2019 the original author or authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # spring.cloud.stream.bindings.input.destination=test ================================================ FILE: spring-cloud-task-integration-tests/src/test/resources/org/springframework/cloud/task/listener/skip-events-sink-channel.properties ================================================ # # Copyright 2016-2019 the original author or authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # spring.cloud.stream.bindings.input.destination=skip-event-foobar ================================================ FILE: spring-cloud-task-integration-tests/src/test/resources/org/springframework/cloud/task/listener/step-execution-sink-channel.properties ================================================ # # Copyright 2016-2019 the original author or authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
# spring.cloud.stream.bindings.input.destination=step-execution-foobar ================================================ FILE: spring-cloud-task-samples/batch-events/.mvn/wrapper/maven-wrapper.properties ================================================ distributionUrl=https://repo1.maven.org/maven2/org/apache/maven/apache-maven/3.6.0/apache-maven-3.6.0-bin.zip ================================================ FILE: spring-cloud-task-samples/batch-events/README.adoc ================================================ = Task Events This is a task application that emits batch job events to the following channels: * job-execution-events * step-execution-events * chunk-events * item-read-events * item-process-events * item-write-events * skip-events Note: More information on this topic is available https://docs.spring.io/spring-cloud-task/docs/current/reference/html/#stream-integration-batch-events[here]. == Requirements: * Java 17 or Above == Build: [source,shell] ---- ./mvnw clean install ---- == Execution: [source,shell] ---- java -jar target/batch-events-5.0.0.jar ---- For example, you can listen for specific job-execution events on a specified channel with a Spring Cloud Stream Sink like the https://github.com/spring-cloud/stream-applications/tree/main/applications/sink/log-sink[log sink] using the following: [source,shell] ---- $ java -jar /log-sink-rabbit-3.1.2.jar --server.port=9090 --spring.cloud.stream.bindings.input.destination=job-execution-events ---- == Dependencies: The batch-events sample requires an instance of RabbitMQ to be running. ================================================ FILE: spring-cloud-task-samples/batch-events/mvnw ================================================ #!/bin/sh # ---------------------------------------------------------------------------- # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. 
See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # ---------------------------------------------------------------------------- # ---------------------------------------------------------------------------- # Maven2 Start Up Batch script # # Required ENV vars: # ------------------ # JAVA_HOME - location of a JDK home dir # # Optional ENV vars # ----------------- # M2_HOME - location of maven2's installed home dir # MAVEN_OPTS - parameters passed to the Java VM when running Maven # e.g. to debug Maven itself, use # set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 # MAVEN_SKIP_RC - flag to disable loading of mavenrc files # ---------------------------------------------------------------------------- if [ -z "$MAVEN_SKIP_RC" ] ; then if [ -f /etc/mavenrc ] ; then . /etc/mavenrc fi if [ -f "$HOME/.mavenrc" ] ; then . "$HOME/.mavenrc" fi fi # OS specific support. $var _must_ be set to either true or false. cygwin=false; darwin=false; mingw=false case "`uname`" in CYGWIN*) cygwin=true ;; MINGW*) mingw=true;; Darwin*) darwin=true # # Look for the Apple JDKs first to preserve the existing behaviour, and then look # for the new JDKs provided by Oracle. 
# if [ -z "$JAVA_HOME" ] && [ -L /System/Library/Frameworks/JavaVM.framework/Versions/CurrentJDK ] ; then # # Apple JDKs # export JAVA_HOME=/System/Library/Frameworks/JavaVM.framework/Versions/CurrentJDK/Home fi if [ -z "$JAVA_HOME" ] && [ -L /System/Library/Java/JavaVirtualMachines/CurrentJDK ] ; then # # Apple JDKs # export JAVA_HOME=/System/Library/Java/JavaVirtualMachines/CurrentJDK/Contents/Home fi if [ -z "$JAVA_HOME" ] && [ -L "/Library/Java/JavaVirtualMachines/CurrentJDK" ] ; then # # Oracle JDKs # export JAVA_HOME=/Library/Java/JavaVirtualMachines/CurrentJDK/Contents/Home fi if [ -z "$JAVA_HOME" ] && [ -x "/usr/libexec/java_home" ]; then # # Apple JDKs # export JAVA_HOME=`/usr/libexec/java_home` fi ;; esac if [ -z "$JAVA_HOME" ] ; then if [ -r /etc/gentoo-release ] ; then JAVA_HOME=`java-config --jre-home` fi fi if [ -z "$M2_HOME" ] ; then ## resolve links - $0 may be a link to maven's home PRG="$0" # need this for relative symlinks while [ -h "$PRG" ] ; do ls=`ls -ld "$PRG"` link=`expr "$ls" : '.*-> \(.*\)$'` if expr "$link" : '/.*' > /dev/null; then PRG="$link" else PRG="`dirname "$PRG"`/$link" fi done saveddir=`pwd` M2_HOME=`dirname "$PRG"`/.. # make it fully qualified M2_HOME=`cd "$M2_HOME" && pwd` cd "$saveddir" # echo Using m2 at $M2_HOME fi # For Cygwin, ensure paths are in UNIX format before anything is touched if $cygwin ; then [ -n "$M2_HOME" ] && M2_HOME=`cygpath --unix "$M2_HOME"` [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --unix "$JAVA_HOME"` [ -n "$CLASSPATH" ] && CLASSPATH=`cygpath --path --unix "$CLASSPATH"` fi # For Migwn, ensure paths are in UNIX format before anything is touched if $mingw ; then [ -n "$M2_HOME" ] && M2_HOME="`(cd "$M2_HOME"; pwd)`" [ -n "$JAVA_HOME" ] && JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`" # TODO classpath? fi if [ -z "$JAVA_HOME" ]; then javaExecutable="`which javac`" if [ -n "$javaExecutable" ] && ! [ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then # readlink(1) is not available as standard on Solaris 10. 
readLink=`which readlink` if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then if $darwin ; then javaHome="`dirname \"$javaExecutable\"`" javaExecutable="`cd \"$javaHome\" && pwd -P`/javac" else javaExecutable="`readlink -f \"$javaExecutable\"`" fi javaHome="`dirname \"$javaExecutable\"`" javaHome=`expr "$javaHome" : '\(.*\)/bin'` JAVA_HOME="$javaHome" export JAVA_HOME fi fi fi if [ -z "$JAVACMD" ] ; then if [ -n "$JAVA_HOME" ] ; then if [ -x "$JAVA_HOME/jre/sh/java" ] ; then # IBM's JDK on AIX uses strange locations for the executables JAVACMD="$JAVA_HOME/jre/sh/java" else JAVACMD="$JAVA_HOME/bin/java" fi else JAVACMD="`which java`" fi fi if [ ! -x "$JAVACMD" ] ; then echo "Error: JAVA_HOME is not defined correctly." >&2 echo " We cannot execute $JAVACMD" >&2 exit 1 fi if [ -z "$JAVA_HOME" ] ; then echo "Warning: JAVA_HOME environment variable is not set." fi CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher # For Cygwin, switch paths to Windows format before running java if $cygwin; then [ -n "$M2_HOME" ] && M2_HOME=`cygpath --path --windows "$M2_HOME"` [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"` [ -n "$CLASSPATH" ] && CLASSPATH=`cygpath --path --windows "$CLASSPATH"` fi # traverses directory structure from process work directory to filesystem root # first directory with .mvn subdirectory is considered project base directory find_maven_basedir() { local basedir=$(pwd) local wdir=$(pwd) while [ "$wdir" != '/' ] ; do if [ -d "$wdir"/.mvn ] ; then basedir=$wdir break fi wdir=$(cd "$wdir/.."; pwd) done echo "${basedir}" } # concatenates all lines of a file concat_lines() { if [ -f "$1" ]; then echo "$(tr -s '\n' ' ' < "$1")" fi } export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-$(find_maven_basedir)} MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS" # Provide a "standardized" way to retrieve the CLI args that will # work with both Windows and non-Windows executions. 
MAVEN_CMD_LINE_ARGS="$MAVEN_CONFIG $@" export MAVEN_CMD_LINE_ARGS WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain exec "$JAVACMD" \ $MAVEN_OPTS \ -classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \ "-Dmaven.home=${M2_HOME}" "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \ ${WRAPPER_LAUNCHER} "$@" ================================================ FILE: spring-cloud-task-samples/batch-events/mvnw.cmd ================================================ @REM ---------------------------------------------------------------------------- @REM Licensed to the Apache Software Foundation (ASF) under one @REM or more contributor license agreements. See the NOTICE file @REM distributed with this work for additional information @REM regarding copyright ownership. The ASF licenses this file @REM to you under the Apache License, Version 2.0 (the @REM "License"); you may not use this file except in compliance @REM with the License. You may obtain a copy of the License at @REM @REM https://www.apache.org/licenses/LICENSE-2.0 @REM @REM Unless required by applicable law or agreed to in writing, @REM software distributed under the License is distributed on an @REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY @REM KIND, either express or implied. See the License for the @REM specific language governing permissions and limitations @REM under the License. @REM ---------------------------------------------------------------------------- @REM ---------------------------------------------------------------------------- @REM Maven2 Start Up Batch script @REM @REM Required ENV vars: @REM JAVA_HOME - location of a JDK home dir @REM @REM Optional ENV vars @REM M2_HOME - location of maven2's installed home dir @REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands @REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a key stroke before ending @REM MAVEN_OPTS - parameters passed to the Java VM when running Maven @REM e.g. 
to debug Maven itself, use @REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 @REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files @REM ---------------------------------------------------------------------------- @REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on' @echo off @REM enable echoing my setting MAVEN_BATCH_ECHO to 'on' @if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO% @REM set %HOME% to equivalent of $HOME if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%") @REM Execute a user defined script before this one if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre @REM check for pre script, once with legacy .bat ending and once with .cmd ending if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat" if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd" :skipRcPre @setlocal set ERROR_CODE=0 @REM To isolate internal variables from possible post scripts, we use another setlocal @setlocal @REM ==== START VALIDATION ==== if not "%JAVA_HOME%" == "" goto OkJHome echo. echo Error: JAVA_HOME not found in your environment. >&2 echo Please set the JAVA_HOME variable in your environment to match the >&2 echo location of your Java installation. >&2 echo. goto error :OkJHome if exist "%JAVA_HOME%\bin\java.exe" goto init echo. echo Error: JAVA_HOME is set to an invalid directory. >&2 echo JAVA_HOME = "%JAVA_HOME%" >&2 echo Please set the JAVA_HOME variable in your environment to match the >&2 echo location of your Java installation. >&2 echo. goto error @REM ==== END VALIDATION ==== :init set MAVEN_CMD_LINE_ARGS=%* @REM Find the project base dir, i.e. the directory that contains the folder ".mvn". @REM Fallback to current working directory if not found. set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR% IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir set EXEC_DIR=%CD% set WDIR=%EXEC_DIR% :findBaseDir IF EXIST "%WDIR%"\.mvn goto baseDirFound cd .. 
IF "%WDIR%"=="%CD%" goto baseDirNotFound set WDIR=%CD% goto findBaseDir :baseDirFound set MAVEN_PROJECTBASEDIR=%WDIR% cd "%EXEC_DIR%" goto endDetectBaseDir :baseDirNotFound set MAVEN_PROJECTBASEDIR=%EXEC_DIR% cd "%EXEC_DIR%" :endDetectBaseDir IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig @setlocal EnableExtensions EnableDelayedExpansion for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a @endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS% :endReadAdditionalConfig SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe" set WRAPPER_JAR="".\.mvn\wrapper\maven-wrapper.jar"" set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain %MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CMD_LINE_ARGS% if ERRORLEVEL 1 goto error goto end :error set ERROR_CODE=1 :end @endlocal & set ERROR_CODE=%ERROR_CODE% if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost @REM check for post script, once with legacy .bat ending and once with .cmd ending if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat" if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd" :skipRcPost @REM pause the script if MAVEN_BATCH_PAUSE is set to 'on' if "%MAVEN_BATCH_PAUSE%" == "on" pause if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE% exit /B %ERROR_CODE% ================================================ FILE: spring-cloud-task-samples/batch-events/pom.xml ================================================ 4.0.0 io.spring.cloud batch-events 5.0.0-SNAPSHOT jar Batch Events Sample Application Sample of sending batch events via Spring Cloud Streams org.springframework.boot spring-boot-starter-parent 4.0.2 UTF-8 17 5.0.2-SNAPSHOT 5.0.2-SNAPSHOT org.springframework.cloud spring-cloud-task-dependencies ${project.version} pom import 
org.springframework.cloud spring-cloud-stream-dependencies ${spring-cloud-stream.version} pom import org.springframework.cloud spring-cloud-task-dependencies ${spring-cloud-task.version} pom import org.springframework.boot spring-boot-starter-batch org.springframework.boot spring-boot-starter-test test org.springframework.cloud spring-cloud-starter-stream-rabbit compile org.springframework.cloud spring-cloud-stream-binder-rabbit-test-support test org.springframework.cloud spring-cloud-starter-task com.h2database h2 org.springframework.cloud spring-cloud-stream-test-binder ${spring-cloud-stream.version} test org.springframework.boot spring-boot-maven-plugin org.apache.maven.plugins maven-javadoc-plugin attach-javadocs jar org.apache.maven.plugins maven-source-plugin attach-sources jar maven-deploy-plugin true spring-snapshots Spring Snapshots https://repo.spring.io/snapshot true spring-milestones Spring Milestones https://repo.spring.io/milestone false spring-releases Spring Releases https://repo.spring.io/release false spring-snapshots Spring Snapshots https://repo.spring.io/snapshot true spring-milestones Spring Milestones https://repo.spring.io/milestone false withoutDockerTests maven-surefire-plugin DockerRequired ================================================ FILE: spring-cloud-task-samples/batch-events/src/main/java/io/spring/cloud/BatchEventsApplication.java ================================================ /* * Copyright 2016-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. */ package io.spring.cloud; import java.util.Arrays; import org.springframework.batch.core.job.Job; import org.springframework.batch.core.job.builder.JobBuilder; import org.springframework.batch.core.repository.JobRepository; import org.springframework.batch.core.scope.context.ChunkContext; import org.springframework.batch.core.step.Step; import org.springframework.batch.core.step.StepContribution; import org.springframework.batch.core.step.builder.StepBuilder; import org.springframework.batch.core.step.tasklet.Tasklet; import org.springframework.batch.infrastructure.item.Chunk; import org.springframework.batch.infrastructure.item.ItemProcessor; import org.springframework.batch.infrastructure.item.ItemWriter; import org.springframework.batch.infrastructure.item.support.ListItemReader; import org.springframework.batch.infrastructure.repeat.RepeatStatus; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.cloud.task.configuration.EnableTask; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.transaction.PlatformTransactionManager; @EnableTask @SpringBootApplication public class BatchEventsApplication { public static void main(String[] args) { SpringApplication.run(BatchEventsApplication.class, args); } @Configuration public static class JobConfiguration { private static final int DEFAULT_CHUNK_COUNT = 3; @Autowired private JobRepository jobRepository; @Autowired private PlatformTransactionManager transactionManager; @Bean public Step step1() { return new StepBuilder("step1", this.jobRepository).tasklet(new Tasklet() { @Override public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws 
Exception { System.out.println("Tasklet has run"); return RepeatStatus.FINISHED; } }, transactionManager).build(); } @Bean public Step step2() { return new StepBuilder("step2", this.jobRepository) .chunk(DEFAULT_CHUNK_COUNT, this.transactionManager) .reader(new ListItemReader<>(Arrays.asList("1", "2", "3", "4", "5", "6"))) .processor(new ItemProcessor() { @Override public String process(String item) throws Exception { return String.valueOf(Integer.parseInt(item) * -1); } }) .writer(new ItemWriter() { @Override public void write(Chunk items) throws Exception { for (String item : items) { System.out.println(">> " + item); } } }) .build(); } @Bean public Job job() { return new JobBuilder("job", this.jobRepository).start(step1()).next(step2()).build(); } } } ================================================ FILE: spring-cloud-task-samples/batch-events/src/main/java/io/spring/cloud/package-info.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Batch events sample application for Spring Cloud Task. 
*/ package io.spring.cloud; ================================================ FILE: spring-cloud-task-samples/batch-events/src/main/resources/application.properties ================================================ logging.level.org.springframework.cloud.task=DEBUG logging.level.org.springframework.cloud.stream=DEBUG spring.application.name=batchEvents ================================================ FILE: spring-cloud-task-samples/batch-events/src/main/resources/logback-test.xml ================================================ ================================================ FILE: spring-cloud-task-samples/batch-events/src/test/java/io/spring/cloud/BatchEventsApplicationTests.java ================================================ /* * Copyright 2016-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package io.spring.cloud; import java.util.ArrayList; import java.util.List; import java.util.UUID; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import tools.jackson.databind.DeserializationFeature; import tools.jackson.databind.ObjectMapper; import tools.jackson.databind.json.JsonMapper; import org.springframework.boot.WebApplicationType; import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.boot.builder.SpringApplicationBuilder; import org.springframework.cloud.stream.binder.test.OutputDestination; import org.springframework.cloud.stream.binder.test.TestChannelBinderConfiguration; import org.springframework.cloud.task.batch.listener.support.JobExecutionEvent; import org.springframework.cloud.task.batch.listener.support.TaskEventProperties; import org.springframework.context.ConfigurableApplicationContext; import org.springframework.context.annotation.Import; import org.springframework.messaging.Message; import static org.assertj.core.api.Assertions.assertThat; @Tag("DockerRequired") public class BatchEventsApplicationTests { private static final String TASK_NAME = "taskEventTest"; private ConfigurableApplicationContext applicationContext; private ObjectMapper objectMapper; private final TaskEventProperties taskEventProperties = new TaskEventProperties(); @BeforeEach public void setup() { objectMapper = JsonMapper.builder().disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES).build(); } @AfterEach public void tearDown() { if (this.applicationContext != null && this.applicationContext.isActive()) { this.applicationContext.close(); } } @Test public void testExecution() throws Exception { List> result = testListener(taskEventProperties.getJobExecutionEventBindingName(), 1); JobExecutionEvent jobExecutionEvent = this.objectMapper.readValue(result.get(0).getPayload(), JobExecutionEvent.class); 
assertThat(jobExecutionEvent.getJobInstance().getJobName()).isEqualTo("job").as("Job name should be job"); } private String[] getCommandLineParams(boolean enableFailJobConfig) { String jobConfig = enableFailJobConfig ? "--spring.cloud.task.test.enable-job-configuration=true" : "--spring.cloud.task.test.enable-fail-job-configuration=true"; return new String[] { "--spring.cloud.task.closecontext_enable=false", "--spring.cloud.task.name=" + TASK_NAME, "--spring.main.web-environment=false", "--spring.cloud.stream.defaultBinder=rabbit", "--spring.cloud.stream.bindings.task-events.destination=test", jobConfig, "foo=" + UUID.randomUUID() }; } private List> testListener(String bindingName, int numberToRead) { List> results = new ArrayList<>(); this.applicationContext = new SpringApplicationBuilder() .sources(TestChannelBinderConfiguration.getCompleteConfiguration(BatchEventsTestApplication.class)) .web(WebApplicationType.NONE) .build() .run(getCommandLineParams(true)); OutputDestination target = this.applicationContext.getBean(OutputDestination.class); for (int i = 0; i < numberToRead; i++) { results.add(target.receive(10000, bindingName)); } return results; } @SpringBootApplication @Import({ BatchEventsApplication.class }) public static class BatchEventsTestApplication { } } ================================================ FILE: spring-cloud-task-samples/batch-events/src/test/resources/io/spring/task/listener/job-listener-sink-channel.properties ================================================ # # Copyright 2015-2019 the original author or authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # spring.cloud.stream.bindings.input.destination=job-execution-events spring.cloud.stream.bindings.input.group=testgroup ================================================ FILE: spring-cloud-task-samples/batch-job/.mvn/wrapper/maven-wrapper.properties ================================================ distributionUrl=https://repo1.maven.org/maven2/org/apache/maven/apache-maven/3.6.0/apache-maven-3.6.0-bin.zip ================================================ FILE: spring-cloud-task-samples/batch-job/README.adoc ================================================ = Spring Batch Job Task This is a Spring Cloud Task application that executes a single Spring Batch Job. == Requirements: * Java 17 or Above == Classes: * BatchJobApplication - the Spring Boot Main Application * JobConfiguration - the configuration for the Spring Batch jobs == Build: [source,shell] ---- mvn clean package ---- == Run: [source,shell] ---- java -jar target/batch-job-5.0.0.jar ---- == Native Build: [source,shell] ---- mvn -Pnative native:compile ---- == Native Run: [source,shell] ---- ./target/batch-job ---- == Run the application with a Postgesql test container [source,shell] ---- ./mvnw spring-boot:test-run ---- ================================================ FILE: spring-cloud-task-samples/batch-job/mvnw ================================================ #!/bin/sh # ---------------------------------------------------------------------------- # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. 
See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # ---------------------------------------------------------------------------- # ---------------------------------------------------------------------------- # Maven2 Start Up Batch script # # Required ENV vars: # ------------------ # JAVA_HOME - location of a JDK home dir # # Optional ENV vars # ----------------- # M2_HOME - location of maven2's installed home dir # MAVEN_OPTS - parameters passed to the Java VM when running Maven # e.g. to debug Maven itself, use # set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 # MAVEN_SKIP_RC - flag to disable loading of mavenrc files # ---------------------------------------------------------------------------- if [ -z "$MAVEN_SKIP_RC" ] ; then if [ -f /etc/mavenrc ] ; then . /etc/mavenrc fi if [ -f "$HOME/.mavenrc" ] ; then . "$HOME/.mavenrc" fi fi # OS specific support. $var _must_ be set to either true or false. cygwin=false; darwin=false; mingw=false case "`uname`" in CYGWIN*) cygwin=true ;; MINGW*) mingw=true;; Darwin*) darwin=true # # Look for the Apple JDKs first to preserve the existing behaviour, and then look # for the new JDKs provided by Oracle. 
# if [ -z "$JAVA_HOME" ] && [ -L /System/Library/Frameworks/JavaVM.framework/Versions/CurrentJDK ] ; then # # Apple JDKs # export JAVA_HOME=/System/Library/Frameworks/JavaVM.framework/Versions/CurrentJDK/Home fi if [ -z "$JAVA_HOME" ] && [ -L /System/Library/Java/JavaVirtualMachines/CurrentJDK ] ; then # # Apple JDKs # export JAVA_HOME=/System/Library/Java/JavaVirtualMachines/CurrentJDK/Contents/Home fi if [ -z "$JAVA_HOME" ] && [ -L "/Library/Java/JavaVirtualMachines/CurrentJDK" ] ; then # # Oracle JDKs # export JAVA_HOME=/Library/Java/JavaVirtualMachines/CurrentJDK/Contents/Home fi if [ -z "$JAVA_HOME" ] && [ -x "/usr/libexec/java_home" ]; then # # Apple JDKs # export JAVA_HOME=`/usr/libexec/java_home` fi ;; esac if [ -z "$JAVA_HOME" ] ; then if [ -r /etc/gentoo-release ] ; then JAVA_HOME=`java-config --jre-home` fi fi if [ -z "$M2_HOME" ] ; then ## resolve links - $0 may be a link to maven's home PRG="$0" # need this for relative symlinks while [ -h "$PRG" ] ; do ls=`ls -ld "$PRG"` link=`expr "$ls" : '.*-> \(.*\)$'` if expr "$link" : '/.*' > /dev/null; then PRG="$link" else PRG="`dirname "$PRG"`/$link" fi done saveddir=`pwd` M2_HOME=`dirname "$PRG"`/.. # make it fully qualified M2_HOME=`cd "$M2_HOME" && pwd` cd "$saveddir" # echo Using m2 at $M2_HOME fi # For Cygwin, ensure paths are in UNIX format before anything is touched if $cygwin ; then [ -n "$M2_HOME" ] && M2_HOME=`cygpath --unix "$M2_HOME"` [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --unix "$JAVA_HOME"` [ -n "$CLASSPATH" ] && CLASSPATH=`cygpath --path --unix "$CLASSPATH"` fi # For Migwn, ensure paths are in UNIX format before anything is touched if $mingw ; then [ -n "$M2_HOME" ] && M2_HOME="`(cd "$M2_HOME"; pwd)`" [ -n "$JAVA_HOME" ] && JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`" # TODO classpath? fi if [ -z "$JAVA_HOME" ]; then javaExecutable="`which javac`" if [ -n "$javaExecutable" ] && ! [ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then # readlink(1) is not available as standard on Solaris 10. 
readLink=`which readlink` if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then if $darwin ; then javaHome="`dirname \"$javaExecutable\"`" javaExecutable="`cd \"$javaHome\" && pwd -P`/javac" else javaExecutable="`readlink -f \"$javaExecutable\"`" fi javaHome="`dirname \"$javaExecutable\"`" javaHome=`expr "$javaHome" : '\(.*\)/bin'` JAVA_HOME="$javaHome" export JAVA_HOME fi fi fi if [ -z "$JAVACMD" ] ; then if [ -n "$JAVA_HOME" ] ; then if [ -x "$JAVA_HOME/jre/sh/java" ] ; then # IBM's JDK on AIX uses strange locations for the executables JAVACMD="$JAVA_HOME/jre/sh/java" else JAVACMD="$JAVA_HOME/bin/java" fi else JAVACMD="`which java`" fi fi if [ ! -x "$JAVACMD" ] ; then echo "Error: JAVA_HOME is not defined correctly." >&2 echo " We cannot execute $JAVACMD" >&2 exit 1 fi if [ -z "$JAVA_HOME" ] ; then echo "Warning: JAVA_HOME environment variable is not set." fi CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher # For Cygwin, switch paths to Windows format before running java if $cygwin; then [ -n "$M2_HOME" ] && M2_HOME=`cygpath --path --windows "$M2_HOME"` [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"` [ -n "$CLASSPATH" ] && CLASSPATH=`cygpath --path --windows "$CLASSPATH"` fi # traverses directory structure from process work directory to filesystem root # first directory with .mvn subdirectory is considered project base directory find_maven_basedir() { local basedir=$(pwd) local wdir=$(pwd) while [ "$wdir" != '/' ] ; do if [ -d "$wdir"/.mvn ] ; then basedir=$wdir break fi wdir=$(cd "$wdir/.."; pwd) done echo "${basedir}" } # concatenates all lines of a file concat_lines() { if [ -f "$1" ]; then echo "$(tr -s '\n' ' ' < "$1")" fi } export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-$(find_maven_basedir)} MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS" # Provide a "standardized" way to retrieve the CLI args that will # work with both Windows and non-Windows executions. 
MAVEN_CMD_LINE_ARGS="$MAVEN_CONFIG $@" export MAVEN_CMD_LINE_ARGS WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain exec "$JAVACMD" \ $MAVEN_OPTS \ -classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \ "-Dmaven.home=${M2_HOME}" "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \ ${WRAPPER_LAUNCHER} "$@" ================================================ FILE: spring-cloud-task-samples/batch-job/mvnw.cmd ================================================ @REM ---------------------------------------------------------------------------- @REM Licensed to the Apache Software Foundation (ASF) under one @REM or more contributor license agreements. See the NOTICE file @REM distributed with this work for additional information @REM regarding copyright ownership. The ASF licenses this file @REM to you under the Apache License, Version 2.0 (the @REM "License"); you may not use this file except in compliance @REM with the License. You may obtain a copy of the License at @REM @REM https://www.apache.org/licenses/LICENSE-2.0 @REM @REM Unless required by applicable law or agreed to in writing, @REM software distributed under the License is distributed on an @REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY @REM KIND, either express or implied. See the License for the @REM specific language governing permissions and limitations @REM under the License. @REM ---------------------------------------------------------------------------- @REM ---------------------------------------------------------------------------- @REM Maven2 Start Up Batch script @REM @REM Required ENV vars: @REM JAVA_HOME - location of a JDK home dir @REM @REM Optional ENV vars @REM M2_HOME - location of maven2's installed home dir @REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands @REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a key stroke before ending @REM MAVEN_OPTS - parameters passed to the Java VM when running Maven @REM e.g. 
to debug Maven itself, use @REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 @REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files @REM ---------------------------------------------------------------------------- @REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on' @echo off @REM enable echoing my setting MAVEN_BATCH_ECHO to 'on' @if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO% @REM set %HOME% to equivalent of $HOME if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%") @REM Execute a user defined script before this one if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre @REM check for pre script, once with legacy .bat ending and once with .cmd ending if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat" if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd" :skipRcPre @setlocal set ERROR_CODE=0 @REM To isolate internal variables from possible post scripts, we use another setlocal @setlocal @REM ==== START VALIDATION ==== if not "%JAVA_HOME%" == "" goto OkJHome echo. echo Error: JAVA_HOME not found in your environment. >&2 echo Please set the JAVA_HOME variable in your environment to match the >&2 echo location of your Java installation. >&2 echo. goto error :OkJHome if exist "%JAVA_HOME%\bin\java.exe" goto init echo. echo Error: JAVA_HOME is set to an invalid directory. >&2 echo JAVA_HOME = "%JAVA_HOME%" >&2 echo Please set the JAVA_HOME variable in your environment to match the >&2 echo location of your Java installation. >&2 echo. goto error @REM ==== END VALIDATION ==== :init set MAVEN_CMD_LINE_ARGS=%* @REM Find the project base dir, i.e. the directory that contains the folder ".mvn". @REM Fallback to current working directory if not found. set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR% IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir set EXEC_DIR=%CD% set WDIR=%EXEC_DIR% :findBaseDir IF EXIST "%WDIR%"\.mvn goto baseDirFound cd .. 
IF "%WDIR%"=="%CD%" goto baseDirNotFound set WDIR=%CD% goto findBaseDir :baseDirFound set MAVEN_PROJECTBASEDIR=%WDIR% cd "%EXEC_DIR%" goto endDetectBaseDir :baseDirNotFound set MAVEN_PROJECTBASEDIR=%EXEC_DIR% cd "%EXEC_DIR%" :endDetectBaseDir IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig @setlocal EnableExtensions EnableDelayedExpansion for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a @endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS% :endReadAdditionalConfig SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe" set WRAPPER_JAR="".\.mvn\wrapper\maven-wrapper.jar"" set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain %MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CMD_LINE_ARGS% if ERRORLEVEL 1 goto error goto end :error set ERROR_CODE=1 :end @endlocal & set ERROR_CODE=%ERROR_CODE% if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost @REM check for post script, once with legacy .bat ending and once with .cmd ending if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat" if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd" :skipRcPost @REM pause the script if MAVEN_BATCH_PAUSE is set to 'on' if "%MAVEN_BATCH_PAUSE%" == "on" pause if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE% exit /B %ERROR_CODE% ================================================ FILE: spring-cloud-task-samples/batch-job/pom.xml ================================================ 4.0.0 io.spring.cloud batch-job jar 5.0.0-SNAPSHOT Spring Cloud Task Batch Example Batch Job Sample Application org.springframework.boot spring-boot-starter-parent 4.0.2 UTF-8 17 org.springframework.cloud spring-cloud-task-dependencies ${project.version} pom import org.springframework.boot spring-boot-starter-batch 
org.springframework.cloud spring-cloud-starter-task com.h2database h2 runtime org.springframework.boot spring-boot-starter-test test org.mariadb.jdbc mariadb-java-client org.testcontainers testcontainers-postgresql test org.springframework.boot spring-boot-testcontainers test org.postgresql postgresql spring-snapshots Spring Snapshots https://repo.spring.io/snapshot true spring-milestones Spring Milestones https://repo.spring.io/milestone false spring-releases Spring Releases https://repo.spring.io/release false spring-snapshots Spring Snapshots https://repo.spring.io/snapshot true spring-milestones Spring Milestones https://repo.spring.io/milestone false org.springframework.boot spring-boot-maven-plugin org.apache.maven.plugins maven-javadoc-plugin attach-javadocs jar org.apache.maven.plugins maven-source-plugin attach-sources jar maven-deploy-plugin true ================================================ FILE: spring-cloud-task-samples/batch-job/src/main/java/io/spring/BatchJobApplication.java ================================================ /* * Copyright 2018-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package io.spring; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.cloud.task.configuration.EnableTask; @EnableTask @SpringBootApplication public class BatchJobApplication { public static void main(String[] args) { SpringApplication.run(BatchJobApplication.class, args); } } ================================================ FILE: spring-cloud-task-samples/batch-job/src/main/java/io/spring/configuration/JobConfiguration.java ================================================ /* * Copyright 2016-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package io.spring.configuration; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.springframework.batch.core.job.Job; import org.springframework.batch.core.job.builder.JobBuilder; import org.springframework.batch.core.repository.JobRepository; import org.springframework.batch.core.scope.context.ChunkContext; import org.springframework.batch.core.step.StepContribution; import org.springframework.batch.core.step.builder.StepBuilder; import org.springframework.batch.core.step.tasklet.Tasklet; import org.springframework.batch.infrastructure.repeat.RepeatStatus; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.transaction.PlatformTransactionManager; /** * @author Michael Minella */ @Configuration(proxyBeanMethods = false) public class JobConfiguration { private static final Log logger = LogFactory.getLog(JobConfiguration.class); @Autowired public JobRepository jobRepository; @Autowired public PlatformTransactionManager transactionManager; @Bean public Job job1() { return new JobBuilder("job1", this.jobRepository) .start(new StepBuilder("job1step1", this.jobRepository).tasklet(new Tasklet() { @Override public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { logger.info("Job1 was run"); return RepeatStatus.FINISHED; } }, transactionManager).build()) .build(); } } ================================================ FILE: spring-cloud-task-samples/batch-job/src/main/java/io/spring/configuration/package-info.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Configuration classes for the batch job sample application. */ package io.spring.configuration; ================================================ FILE: spring-cloud-task-samples/batch-job/src/main/java/io/spring/package-info.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Batch job sample application for Spring Cloud Task. */ package io.spring; ================================================ FILE: spring-cloud-task-samples/batch-job/src/main/resources/application.properties ================================================ spring.application.name=Demo Batch Job Task logging.level.org.springframework.cloud.task=DEBUG ================================================ FILE: spring-cloud-task-samples/batch-job/src/test/java/io/spring/BatchJobApplicationTests.java ================================================ /* * Copyright 2015-present the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.spring; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.boot.SpringApplication; import org.springframework.boot.test.system.CapturedOutput; import org.springframework.boot.test.system.OutputCaptureExtension; import static org.assertj.core.api.Assertions.assertThat; /** * Verifies that the Task Application outputs the correct task log entries. 
* * @author Michael Minella */ @ExtendWith(OutputCaptureExtension.class) public class BatchJobApplicationTests { @Test public void testBatchJobApp(CapturedOutput capturedOutput) throws Exception { final String JOB_RUN_MESSAGE = " was run"; final String CREATE_TASK_MESSAGE = "Creating: TaskExecution{executionId="; final String UPDATE_TASK_MESSAGE = "Updating: TaskExecution with executionId="; final String JOB_ASSOCIATION_MESSAGE = "The job execution id "; final String EXIT_CODE_MESSAGE = "with the following {exitCode=0"; SpringApplication.run(BatchJobApplication.class); String output = capturedOutput.toString(); assertThat(output).contains(JOB_RUN_MESSAGE); assertThat(output).contains(CREATE_TASK_MESSAGE); assertThat(output).contains(UPDATE_TASK_MESSAGE); assertThat(output).contains(EXIT_CODE_MESSAGE); int i = output.indexOf(JOB_ASSOCIATION_MESSAGE); assertThat(i).isGreaterThan(0); String taskTitle = "taskName='Demo Batch Job Task'"; Pattern pattern = Pattern.compile(taskTitle); Matcher matcher = pattern.matcher(output); int count = 0; while (matcher.find()) { count++; } assertThat(count).isEqualTo(1); } } ================================================ FILE: spring-cloud-task-samples/batch-job/src/test/java/io/spring/BatchJobTestConfiguration.java ================================================ /* * Copyright 2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package io.spring; import org.testcontainers.postgresql.PostgreSQLContainer; import org.testcontainers.utility.DockerImageName; import org.springframework.boot.test.context.TestConfiguration; import org.springframework.boot.testcontainers.service.connection.ServiceConnection; import org.springframework.context.annotation.Bean; @TestConfiguration(proxyBeanMethods = false) class BatchJobTestConfiguration { @Bean @ServiceConnection public PostgreSQLContainer postgresSQLContainer() { return new PostgreSQLContainer(DockerImageName.parse("postgres:15.1")); } } ================================================ FILE: spring-cloud-task-samples/batch-job/src/test/java/io/spring/TestBatchJobApp.java ================================================ /* * Copyright 2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.spring; import org.springframework.boot.SpringApplication; public class TestBatchJobApp { public static void main(String[] args) { String[] myArgs = { "--spring.batch.jdbc.initialize-schema=always" }; SpringApplication.from(BatchJobApplication::main).with(BatchJobTestConfiguration.class).run(myArgs); } } ================================================ FILE: spring-cloud-task-samples/batch-job/src/test/resources/application.properties ================================================ # # Copyright 2016-2019 the original author or authors. 
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # logging.level.root=DEBUG spring.application.name=Demo Batch Job Task ================================================ FILE: spring-cloud-task-samples/jpa-sample/.mvn/wrapper/maven-wrapper.properties ================================================ distributionUrl=https://repo1.maven.org/maven2/org/apache/maven/apache-maven/3.6.0/apache-maven-3.6.0-bin.zip ================================================ FILE: spring-cloud-task-samples/jpa-sample/README.adoc ================================================ = JPA Sample Task This is a Spring Cloud Task Boot Application that uses JPA to persist data to a data store. == Requirements: * Java 17 or Above == Classes: * JpaApplication - the Spring Boot Main Application. * TaskRunComponent - Component responsible for writing data to the repository. * TaskRunOutput - Entity to be written to the repository. == Build: [source,shell] ---- mvn clean package ---- == Run: [source,shell] ---- java -jar target/jpa-sample-5.0.0.jar ---- == Native Build: [source,shell] ---- mvn -Pnative native:compile ---- == Native Run: [source,shell] ---- ./target/jpa-sample ---- ================================================ FILE: spring-cloud-task-samples/jpa-sample/mvnw ================================================ #!/bin/sh # ---------------------------------------------------------------------------- # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. 
See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # ---------------------------------------------------------------------------- # ---------------------------------------------------------------------------- # Maven2 Start Up Batch script # # Required ENV vars: # ------------------ # JAVA_HOME - location of a JDK home dir # # Optional ENV vars # ----------------- # M2_HOME - location of maven2's installed home dir # MAVEN_OPTS - parameters passed to the Java VM when running Maven # e.g. to debug Maven itself, use # set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 # MAVEN_SKIP_RC - flag to disable loading of mavenrc files # ---------------------------------------------------------------------------- if [ -z "$MAVEN_SKIP_RC" ] ; then if [ -f /etc/mavenrc ] ; then . /etc/mavenrc fi if [ -f "$HOME/.mavenrc" ] ; then . "$HOME/.mavenrc" fi fi # OS specific support. $var _must_ be set to either true or false. cygwin=false; darwin=false; mingw=false case "`uname`" in CYGWIN*) cygwin=true ;; MINGW*) mingw=true;; Darwin*) darwin=true # # Look for the Apple JDKs first to preserve the existing behaviour, and then look # for the new JDKs provided by Oracle. 
# if [ -z "$JAVA_HOME" ] && [ -L /System/Library/Frameworks/JavaVM.framework/Versions/CurrentJDK ] ; then # # Apple JDKs # export JAVA_HOME=/System/Library/Frameworks/JavaVM.framework/Versions/CurrentJDK/Home fi if [ -z "$JAVA_HOME" ] && [ -L /System/Library/Java/JavaVirtualMachines/CurrentJDK ] ; then # # Apple JDKs # export JAVA_HOME=/System/Library/Java/JavaVirtualMachines/CurrentJDK/Contents/Home fi if [ -z "$JAVA_HOME" ] && [ -L "/Library/Java/JavaVirtualMachines/CurrentJDK" ] ; then # # Oracle JDKs # export JAVA_HOME=/Library/Java/JavaVirtualMachines/CurrentJDK/Contents/Home fi if [ -z "$JAVA_HOME" ] && [ -x "/usr/libexec/java_home" ]; then # # Apple JDKs # export JAVA_HOME=`/usr/libexec/java_home` fi ;; esac if [ -z "$JAVA_HOME" ] ; then if [ -r /etc/gentoo-release ] ; then JAVA_HOME=`java-config --jre-home` fi fi if [ -z "$M2_HOME" ] ; then ## resolve links - $0 may be a link to maven's home PRG="$0" # need this for relative symlinks while [ -h "$PRG" ] ; do ls=`ls -ld "$PRG"` link=`expr "$ls" : '.*-> \(.*\)$'` if expr "$link" : '/.*' > /dev/null; then PRG="$link" else PRG="`dirname "$PRG"`/$link" fi done saveddir=`pwd` M2_HOME=`dirname "$PRG"`/.. # make it fully qualified M2_HOME=`cd "$M2_HOME" && pwd` cd "$saveddir" # echo Using m2 at $M2_HOME fi # For Cygwin, ensure paths are in UNIX format before anything is touched if $cygwin ; then [ -n "$M2_HOME" ] && M2_HOME=`cygpath --unix "$M2_HOME"` [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --unix "$JAVA_HOME"` [ -n "$CLASSPATH" ] && CLASSPATH=`cygpath --path --unix "$CLASSPATH"` fi # For Migwn, ensure paths are in UNIX format before anything is touched if $mingw ; then [ -n "$M2_HOME" ] && M2_HOME="`(cd "$M2_HOME"; pwd)`" [ -n "$JAVA_HOME" ] && JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`" # TODO classpath? fi if [ -z "$JAVA_HOME" ]; then javaExecutable="`which javac`" if [ -n "$javaExecutable" ] && ! [ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then # readlink(1) is not available as standard on Solaris 10. 
readLink=`which readlink` if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then if $darwin ; then javaHome="`dirname \"$javaExecutable\"`" javaExecutable="`cd \"$javaHome\" && pwd -P`/javac" else javaExecutable="`readlink -f \"$javaExecutable\"`" fi javaHome="`dirname \"$javaExecutable\"`" javaHome=`expr "$javaHome" : '\(.*\)/bin'` JAVA_HOME="$javaHome" export JAVA_HOME fi fi fi if [ -z "$JAVACMD" ] ; then if [ -n "$JAVA_HOME" ] ; then if [ -x "$JAVA_HOME/jre/sh/java" ] ; then # IBM's JDK on AIX uses strange locations for the executables JAVACMD="$JAVA_HOME/jre/sh/java" else JAVACMD="$JAVA_HOME/bin/java" fi else JAVACMD="`which java`" fi fi if [ ! -x "$JAVACMD" ] ; then echo "Error: JAVA_HOME is not defined correctly." >&2 echo " We cannot execute $JAVACMD" >&2 exit 1 fi if [ -z "$JAVA_HOME" ] ; then echo "Warning: JAVA_HOME environment variable is not set." fi CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher # For Cygwin, switch paths to Windows format before running java if $cygwin; then [ -n "$M2_HOME" ] && M2_HOME=`cygpath --path --windows "$M2_HOME"` [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"` [ -n "$CLASSPATH" ] && CLASSPATH=`cygpath --path --windows "$CLASSPATH"` fi # traverses directory structure from process work directory to filesystem root # first directory with .mvn subdirectory is considered project base directory find_maven_basedir() { local basedir=$(pwd) local wdir=$(pwd) while [ "$wdir" != '/' ] ; do if [ -d "$wdir"/.mvn ] ; then basedir=$wdir break fi wdir=$(cd "$wdir/.."; pwd) done echo "${basedir}" } # concatenates all lines of a file concat_lines() { if [ -f "$1" ]; then echo "$(tr -s '\n' ' ' < "$1")" fi } export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-$(find_maven_basedir)} MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS" # Provide a "standardized" way to retrieve the CLI args that will # work with both Windows and non-Windows executions. 
MAVEN_CMD_LINE_ARGS="$MAVEN_CONFIG $@" export MAVEN_CMD_LINE_ARGS WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain exec "$JAVACMD" \ $MAVEN_OPTS \ -classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \ "-Dmaven.home=${M2_HOME}" "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \ ${WRAPPER_LAUNCHER} "$@" ================================================ FILE: spring-cloud-task-samples/jpa-sample/mvnw.cmd ================================================ @REM ---------------------------------------------------------------------------- @REM Licensed to the Apache Software Foundation (ASF) under one @REM or more contributor license agreements. See the NOTICE file @REM distributed with this work for additional information @REM regarding copyright ownership. The ASF licenses this file @REM to you under the Apache License, Version 2.0 (the @REM "License"); you may not use this file except in compliance @REM with the License. You may obtain a copy of the License at @REM @REM https://www.apache.org/licenses/LICENSE-2.0 @REM @REM Unless required by applicable law or agreed to in writing, @REM software distributed under the License is distributed on an @REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY @REM KIND, either express or implied. See the License for the @REM specific language governing permissions and limitations @REM under the License. @REM ---------------------------------------------------------------------------- @REM ---------------------------------------------------------------------------- @REM Maven2 Start Up Batch script @REM @REM Required ENV vars: @REM JAVA_HOME - location of a JDK home dir @REM @REM Optional ENV vars @REM M2_HOME - location of maven2's installed home dir @REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands @REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a key stroke before ending @REM MAVEN_OPTS - parameters passed to the Java VM when running Maven @REM e.g. 
to debug Maven itself, use @REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 @REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files @REM ---------------------------------------------------------------------------- @REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on' @echo off @REM enable echoing my setting MAVEN_BATCH_ECHO to 'on' @if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO% @REM set %HOME% to equivalent of $HOME if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%") @REM Execute a user defined script before this one if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre @REM check for pre script, once with legacy .bat ending and once with .cmd ending if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat" if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd" :skipRcPre @setlocal set ERROR_CODE=0 @REM To isolate internal variables from possible post scripts, we use another setlocal @setlocal @REM ==== START VALIDATION ==== if not "%JAVA_HOME%" == "" goto OkJHome echo. echo Error: JAVA_HOME not found in your environment. >&2 echo Please set the JAVA_HOME variable in your environment to match the >&2 echo location of your Java installation. >&2 echo. goto error :OkJHome if exist "%JAVA_HOME%\bin\java.exe" goto init echo. echo Error: JAVA_HOME is set to an invalid directory. >&2 echo JAVA_HOME = "%JAVA_HOME%" >&2 echo Please set the JAVA_HOME variable in your environment to match the >&2 echo location of your Java installation. >&2 echo. goto error @REM ==== END VALIDATION ==== :init set MAVEN_CMD_LINE_ARGS=%* @REM Find the project base dir, i.e. the directory that contains the folder ".mvn". @REM Fallback to current working directory if not found. set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR% IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir set EXEC_DIR=%CD% set WDIR=%EXEC_DIR% :findBaseDir IF EXIST "%WDIR%"\.mvn goto baseDirFound cd .. 
IF "%WDIR%"=="%CD%" goto baseDirNotFound set WDIR=%CD% goto findBaseDir :baseDirFound set MAVEN_PROJECTBASEDIR=%WDIR% cd "%EXEC_DIR%" goto endDetectBaseDir :baseDirNotFound set MAVEN_PROJECTBASEDIR=%EXEC_DIR% cd "%EXEC_DIR%" :endDetectBaseDir IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig @setlocal EnableExtensions EnableDelayedExpansion for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a @endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS% :endReadAdditionalConfig SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe" set WRAPPER_JAR="".\.mvn\wrapper\maven-wrapper.jar"" set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain %MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CMD_LINE_ARGS% if ERRORLEVEL 1 goto error goto end :error set ERROR_CODE=1 :end @endlocal & set ERROR_CODE=%ERROR_CODE% if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost @REM check for post script, once with legacy .bat ending and once with .cmd ending if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat" if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd" :skipRcPost @REM pause the script if MAVEN_BATCH_PAUSE is set to 'on' if "%MAVEN_BATCH_PAUSE%" == "on" pause if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE% exit /B %ERROR_CODE% ================================================ FILE: spring-cloud-task-samples/jpa-sample/pom.xml ================================================ 4.0.0 io.spring.cloud jpa-sample jar 5.0.0-SNAPSHOT To show users how to enable a task with a JPA application. 
Spring Cloud Task JPA Sample Application org.springframework.boot spring-boot-starter-parent 4.0.2 5.0.2-SNAPSHOT UTF-8 17 org.springframework.cloud spring-cloud-task-dependencies ${project.version} pom import org.springframework.cloud spring-cloud-starter-task org.springframework.boot spring-boot-starter-data-jpa com.h2database h2 org.springframework.boot spring-boot-starter-test test org.springframework.cloud spring-cloud-test-support ${spring-cloud-commons.version} test spring-snapshots Spring Snapshots https://repo.spring.io/snapshot true spring-milestones Spring Milestones https://repo.spring.io/milestone false spring-releases Spring Releases https://repo.spring.io/release false spring-snapshots Spring Snapshots https://repo.spring.io/snapshot true spring-milestones Spring Milestones https://repo.spring.io/milestone false org.springframework.boot spring-boot-maven-plugin org.apache.maven.plugins maven-javadoc-plugin attach-javadocs jar org.apache.maven.plugins maven-source-plugin attach-sources jar maven-deploy-plugin true ================================================ FILE: spring-cloud-task-samples/jpa-sample/src/main/java/io/spring/JpaApplication.java ================================================ /* * Copyright 2017-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package io.spring; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.cloud.task.configuration.EnableTask; @EnableTask @SpringBootApplication public class JpaApplication { public static void main(String[] args) { SpringApplication.run(JpaApplication.class, args); } } ================================================ FILE: spring-cloud-task-samples/jpa-sample/src/main/java/io/spring/configuration/TaskRunComponent.java ================================================ /* * Copyright 2017-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.spring.configuration; import java.text.SimpleDateFormat; import java.util.Date; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.cloud.task.listener.annotation.BeforeTask; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.stereotype.Component; /** * Records an entry in the TASK_RUN_OUTPUT table on the BeforeTask event. 
* * @author Glenn Renfro * @author Pas Apicella */ @Component public class TaskRunComponent { private static final Log logger = LogFactory.getLog(TaskRunComponent.class); @Autowired private TaskRunRepository taskRunRepository; @BeforeTask public void init(TaskExecution taskExecution) { String execDate = new SimpleDateFormat().format(new Date()); this.taskRunRepository.save(new TaskRunOutput("Executed at " + execDate)); logger.info("Executed at : " + execDate); } } ================================================ FILE: spring-cloud-task-samples/jpa-sample/src/main/java/io/spring/configuration/TaskRunOutput.java ================================================ /* * Copyright 2017-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.spring.configuration; import jakarta.persistence.Entity; import jakarta.persistence.GeneratedValue; import jakarta.persistence.GenerationType; import jakarta.persistence.Id; import jakarta.persistence.Table; /** * Entity for the id and output to be written to the data store. 
* * @author Pas Apicella * @author Glenn Renfro */ @Entity @Table(name = "TASK_RUN_OUTPUT") public class TaskRunOutput { @Id @GeneratedValue(strategy = GenerationType.AUTO) private Long id; private String output; public TaskRunOutput() { } public TaskRunOutput(String output) { this.output = output; } public Long getId() { return this.id; } public void setId(Long id) { this.id = id; } public String getOutput() { return this.output; } public void setOutput(String output) { this.output = output; } @Override public String toString() { return "TaskRunOutput{" + "id=" + this.id + ", output='" + this.output + '\'' + '}'; } } ================================================ FILE: spring-cloud-task-samples/jpa-sample/src/main/java/io/spring/configuration/TaskRunRepository.java ================================================ /* * Copyright 2017-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.spring.configuration; import org.springframework.data.jpa.repository.JpaRepository; /** * @author Pas Apicella * @author Glenn Renfro */ public interface TaskRunRepository extends JpaRepository { } ================================================ FILE: spring-cloud-task-samples/jpa-sample/src/main/java/io/spring/configuration/package-info.java ================================================ /* * Copyright 2015-present the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Configuration classes for the JPA sample application. */ package io.spring.configuration; ================================================ FILE: spring-cloud-task-samples/jpa-sample/src/main/java/io/spring/package-info.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * JPA sample application for Spring Cloud Task. 
*/ package io.spring; ================================================ FILE: spring-cloud-task-samples/jpa-sample/src/main/resources/application-cloud.yml ================================================ spring: jpa: hibernate: ddl-auto: update show-sql: true ================================================ FILE: spring-cloud-task-samples/jpa-sample/src/main/resources/application.yml ================================================ spring: jpa: hibernate: ddl-auto: update show-sql: true application: name: Spring Cloud Task JPA Sample Application logging: level: org: springframework: cloud: task: debug ================================================ FILE: spring-cloud-task-samples/jpa-sample/src/test/java/io/spring/JpaApplicationTests.java ================================================ /* * Copyright 2017-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package io.spring; import java.sql.SQLException; import java.util.Map; import javax.sql.DataSource; import org.h2.tools.Server; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.boot.SpringApplication; import org.springframework.boot.test.system.CapturedOutput; import org.springframework.boot.test.system.OutputCaptureExtension; import org.springframework.context.ConfigurableApplicationContext; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.jdbc.datasource.DriverManagerDataSource; import org.springframework.test.util.TestSocketUtils; import static org.assertj.core.api.Assertions.assertThat; /** * Verifies that a JPA Application can write its data to a repository. * * @author Glenn Renfro */ @ExtendWith(OutputCaptureExtension.class) public class JpaApplicationTests { private final static String DATASOURCE_URL; private final static String DATASOURCE_USER_NAME = "SA"; private final static String DATASOURCE_USER_PASSWORD = ""; private final static String DATASOURCE_DRIVER_CLASS_NAME = "org.h2.Driver"; private static int randomPort; static { randomPort = TestSocketUtils.findAvailableTcpPort(); DATASOURCE_URL = "jdbc:h2:tcp://localhost:" + randomPort + "/mem:dataflow;DB_CLOSE_DELAY=-1;" + "DB_CLOSE_ON_EXIT=FALSE"; } private ConfigurableApplicationContext context; private DataSource dataSource; private Server server; @BeforeEach public void setup() { DriverManagerDataSource dataSource = new DriverManagerDataSource(); dataSource.setDriverClassName(DATASOURCE_DRIVER_CLASS_NAME); dataSource.setUrl(DATASOURCE_URL); dataSource.setUsername(DATASOURCE_USER_NAME); dataSource.setPassword(DATASOURCE_USER_PASSWORD); this.dataSource = dataSource; try { this.server = Server .createTcpServer("-tcp", "-ifNotExists", "-tcpAllowOthers", "-tcpPort", String.valueOf(randomPort)) .start(); } catch (SQLException e) { 
throw new IllegalStateException(e); } } @AfterEach public void tearDown() { if (this.context != null && this.context.isActive()) { this.context.close(); } this.server.stop(); } @Test public void testBatchJobApp(CapturedOutput capturedOutput) { final String INSERT_MESSAGE = "Hibernate: insert into task_run_output ("; this.context = SpringApplication.run(JpaApplication.class, "--spring.datasource.url=" + DATASOURCE_URL, "--spring.datasource.username=" + DATASOURCE_USER_NAME, "--spring.datasource.driverClassName=" + DATASOURCE_DRIVER_CLASS_NAME, "--spring.jpa.database-platform=org.hibernate.dialect.H2Dialect"); String output = capturedOutput.toString(); assertThat(output.contains(INSERT_MESSAGE)).as("Unable to find the insert message: " + output).isTrue(); JdbcTemplate template = new JdbcTemplate(this.dataSource); /* typed map avoids a raw-type warning and unchecked value access */ Map<String, Object> result = template.queryForMap("Select * from TASK_RUN_OUTPUT"); assertThat(result.get("ID")).isEqualTo(1L); assertThat(((String) result.get("OUTPUT"))).contains("Executed at"); } } ================================================ FILE: spring-cloud-task-samples/jpa-sample/src/test/resources/application.properties ================================================ # # Copyright 2018-2019 the original author or authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
# logging.level.root=DEBUG spring.application.name=Spring Cloud Task JPA Sample Application ================================================ FILE: spring-cloud-task-samples/multiple-datasources/.mvn/wrapper/maven-wrapper.properties ================================================ distributionUrl=https://repo1.maven.org/maven2/org/apache/maven/apache-maven/3.6.0/apache-maven-3.6.0-bin.zip ================================================ FILE: spring-cloud-task-samples/multiple-datasources/README.adoc ================================================ = Multiple DataSources Sample Task This is a Spring Boot application that utilizes two DataSources and explicitly configures which one should be used for the Spring Cloud Task repository. == Requirements: * Java 17 or Above == Classes: * `MultipleDataSourcesApplication` - the Spring Boot Main Application. * `SampleCommandLineRunner` - the `CommandLineRunner` implementation for this task. It outputs the number of `DataSource` beans found in the context (should be 2). * `EmbeddedDataSourceConfiguration` - Configures two `DataSource` beans using embedded databases. * `ExternalDataSourceConfiguration` - Configures two `DataSource` beans using external databases. * `CustomTaskConfigurer` - Uses a Spring `@Qualifier` to specify the correct `DataSource` to use. == Build: [source,shell] ---- mvn clean package ---- == Execute sample using 2 embedded databases (default): [source,shell] ---- java -jar target/multiple-datasources-5.0.0.jar ---- == Native Build: [source,shell] ---- mvn -Pnative native:compile ---- == Run sample using 2 embedded databases (default) with native app: [source,shell] ---- ./target/multiple-datasources ---- == Execute sample using 2 external databases: Using the `external` profile, users will be able to establish both the default `spring.datasource` data source and a `second.datasource` data source. 
For example: [source,shell,indent=2] ---- export spring_datasource_url= export spring_datasource_username= export spring_datasource_password= export spring_datasource_driverClassName=org.mariadb.jdbc.Driver export second_datasource_url=jdbc: export second_datasource_username= export second_datasource_password= export second_datasource_driverClassName=org.mariadb.jdbc.Driver java -jar target/multiple-datasources-2.3.0-RELEASE.jar --spring.profiles.active=external ---- ================================================ FILE: spring-cloud-task-samples/multiple-datasources/mvnw ================================================ #!/bin/sh # ---------------------------------------------------------------------------- # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # ---------------------------------------------------------------------------- # ---------------------------------------------------------------------------- # Maven2 Start Up Batch script # # Required ENV vars: # ------------------ # JAVA_HOME - location of a JDK home dir # # Optional ENV vars # ----------------- # M2_HOME - location of maven2's installed home dir # MAVEN_OPTS - parameters passed to the Java VM when running Maven # e.g. 
to debug Maven itself, use # set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 # MAVEN_SKIP_RC - flag to disable loading of mavenrc files # ---------------------------------------------------------------------------- if [ -z "$MAVEN_SKIP_RC" ] ; then if [ -f /etc/mavenrc ] ; then . /etc/mavenrc fi if [ -f "$HOME/.mavenrc" ] ; then . "$HOME/.mavenrc" fi fi # OS specific support. $var _must_ be set to either true or false. cygwin=false; darwin=false; mingw=false case "`uname`" in CYGWIN*) cygwin=true ;; MINGW*) mingw=true;; Darwin*) darwin=true # # Look for the Apple JDKs first to preserve the existing behaviour, and then look # for the new JDKs provided by Oracle. # if [ -z "$JAVA_HOME" ] && [ -L /System/Library/Frameworks/JavaVM.framework/Versions/CurrentJDK ] ; then # # Apple JDKs # export JAVA_HOME=/System/Library/Frameworks/JavaVM.framework/Versions/CurrentJDK/Home fi if [ -z "$JAVA_HOME" ] && [ -L /System/Library/Java/JavaVirtualMachines/CurrentJDK ] ; then # # Apple JDKs # export JAVA_HOME=/System/Library/Java/JavaVirtualMachines/CurrentJDK/Contents/Home fi if [ -z "$JAVA_HOME" ] && [ -L "/Library/Java/JavaVirtualMachines/CurrentJDK" ] ; then # # Oracle JDKs # export JAVA_HOME=/Library/Java/JavaVirtualMachines/CurrentJDK/Contents/Home fi if [ -z "$JAVA_HOME" ] && [ -x "/usr/libexec/java_home" ]; then # # Apple JDKs # export JAVA_HOME=`/usr/libexec/java_home` fi ;; esac if [ -z "$JAVA_HOME" ] ; then if [ -r /etc/gentoo-release ] ; then JAVA_HOME=`java-config --jre-home` fi fi if [ -z "$M2_HOME" ] ; then ## resolve links - $0 may be a link to maven's home PRG="$0" # need this for relative symlinks while [ -h "$PRG" ] ; do ls=`ls -ld "$PRG"` link=`expr "$ls" : '.*-> \(.*\)$'` if expr "$link" : '/.*' > /dev/null; then PRG="$link" else PRG="`dirname "$PRG"`/$link" fi done saveddir=`pwd` M2_HOME=`dirname "$PRG"`/.. 
# make it fully qualified M2_HOME=`cd "$M2_HOME" && pwd` cd "$saveddir" # echo Using m2 at $M2_HOME fi # For Cygwin, ensure paths are in UNIX format before anything is touched if $cygwin ; then [ -n "$M2_HOME" ] && M2_HOME=`cygpath --unix "$M2_HOME"` [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --unix "$JAVA_HOME"` [ -n "$CLASSPATH" ] && CLASSPATH=`cygpath --path --unix "$CLASSPATH"` fi # For Migwn, ensure paths are in UNIX format before anything is touched if $mingw ; then [ -n "$M2_HOME" ] && M2_HOME="`(cd "$M2_HOME"; pwd)`" [ -n "$JAVA_HOME" ] && JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`" # TODO classpath? fi if [ -z "$JAVA_HOME" ]; then javaExecutable="`which javac`" if [ -n "$javaExecutable" ] && ! [ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then # readlink(1) is not available as standard on Solaris 10. readLink=`which readlink` if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then if $darwin ; then javaHome="`dirname \"$javaExecutable\"`" javaExecutable="`cd \"$javaHome\" && pwd -P`/javac" else javaExecutable="`readlink -f \"$javaExecutable\"`" fi javaHome="`dirname \"$javaExecutable\"`" javaHome=`expr "$javaHome" : '\(.*\)/bin'` JAVA_HOME="$javaHome" export JAVA_HOME fi fi fi if [ -z "$JAVACMD" ] ; then if [ -n "$JAVA_HOME" ] ; then if [ -x "$JAVA_HOME/jre/sh/java" ] ; then # IBM's JDK on AIX uses strange locations for the executables JAVACMD="$JAVA_HOME/jre/sh/java" else JAVACMD="$JAVA_HOME/bin/java" fi else JAVACMD="`which java`" fi fi if [ ! -x "$JAVACMD" ] ; then echo "Error: JAVA_HOME is not defined correctly." >&2 echo " We cannot execute $JAVACMD" >&2 exit 1 fi if [ -z "$JAVA_HOME" ] ; then echo "Warning: JAVA_HOME environment variable is not set." 
fi CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher # For Cygwin, switch paths to Windows format before running java if $cygwin; then [ -n "$M2_HOME" ] && M2_HOME=`cygpath --path --windows "$M2_HOME"` [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"` [ -n "$CLASSPATH" ] && CLASSPATH=`cygpath --path --windows "$CLASSPATH"` fi # traverses directory structure from process work directory to filesystem root # first directory with .mvn subdirectory is considered project base directory find_maven_basedir() { local basedir=$(pwd) local wdir=$(pwd) while [ "$wdir" != '/' ] ; do if [ -d "$wdir"/.mvn ] ; then basedir=$wdir break fi wdir=$(cd "$wdir/.."; pwd) done echo "${basedir}" } # concatenates all lines of a file concat_lines() { if [ -f "$1" ]; then echo "$(tr -s '\n' ' ' < "$1")" fi } export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-$(find_maven_basedir)} MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS" # Provide a "standardized" way to retrieve the CLI args that will # work with both Windows and non-Windows executions. MAVEN_CMD_LINE_ARGS="$MAVEN_CONFIG $@" export MAVEN_CMD_LINE_ARGS WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain exec "$JAVACMD" \ $MAVEN_OPTS \ -classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \ "-Dmaven.home=${M2_HOME}" "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \ ${WRAPPER_LAUNCHER} "$@" ================================================ FILE: spring-cloud-task-samples/multiple-datasources/mvnw.cmd ================================================ @REM ---------------------------------------------------------------------------- @REM Licensed to the Apache Software Foundation (ASF) under one @REM or more contributor license agreements. See the NOTICE file @REM distributed with this work for additional information @REM regarding copyright ownership. 
The ASF licenses this file @REM to you under the Apache License, Version 2.0 (the @REM "License"); you may not use this file except in compliance @REM with the License. You may obtain a copy of the License at @REM @REM https://www.apache.org/licenses/LICENSE-2.0 @REM @REM Unless required by applicable law or agreed to in writing, @REM software distributed under the License is distributed on an @REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY @REM KIND, either express or implied. See the License for the @REM specific language governing permissions and limitations @REM under the License. @REM ---------------------------------------------------------------------------- @REM ---------------------------------------------------------------------------- @REM Maven2 Start Up Batch script @REM @REM Required ENV vars: @REM JAVA_HOME - location of a JDK home dir @REM @REM Optional ENV vars @REM M2_HOME - location of maven2's installed home dir @REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands @REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a key stroke before ending @REM MAVEN_OPTS - parameters passed to the Java VM when running Maven @REM e.g. 
to debug Maven itself, use @REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 @REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files @REM ---------------------------------------------------------------------------- @REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on' @echo off @REM enable echoing my setting MAVEN_BATCH_ECHO to 'on' @if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO% @REM set %HOME% to equivalent of $HOME if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%") @REM Execute a user defined script before this one if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre @REM check for pre script, once with legacy .bat ending and once with .cmd ending if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat" if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd" :skipRcPre @setlocal set ERROR_CODE=0 @REM To isolate internal variables from possible post scripts, we use another setlocal @setlocal @REM ==== START VALIDATION ==== if not "%JAVA_HOME%" == "" goto OkJHome echo. echo Error: JAVA_HOME not found in your environment. >&2 echo Please set the JAVA_HOME variable in your environment to match the >&2 echo location of your Java installation. >&2 echo. goto error :OkJHome if exist "%JAVA_HOME%\bin\java.exe" goto init echo. echo Error: JAVA_HOME is set to an invalid directory. >&2 echo JAVA_HOME = "%JAVA_HOME%" >&2 echo Please set the JAVA_HOME variable in your environment to match the >&2 echo location of your Java installation. >&2 echo. goto error @REM ==== END VALIDATION ==== :init set MAVEN_CMD_LINE_ARGS=%* @REM Find the project base dir, i.e. the directory that contains the folder ".mvn". @REM Fallback to current working directory if not found. set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR% IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir set EXEC_DIR=%CD% set WDIR=%EXEC_DIR% :findBaseDir IF EXIST "%WDIR%"\.mvn goto baseDirFound cd .. 
IF "%WDIR%"=="%CD%" goto baseDirNotFound set WDIR=%CD% goto findBaseDir :baseDirFound set MAVEN_PROJECTBASEDIR=%WDIR% cd "%EXEC_DIR%" goto endDetectBaseDir :baseDirNotFound set MAVEN_PROJECTBASEDIR=%EXEC_DIR% cd "%EXEC_DIR%" :endDetectBaseDir IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig @setlocal EnableExtensions EnableDelayedExpansion for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a @endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS% :endReadAdditionalConfig SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe" set WRAPPER_JAR="".\.mvn\wrapper\maven-wrapper.jar"" set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain %MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CMD_LINE_ARGS% if ERRORLEVEL 1 goto error goto end :error set ERROR_CODE=1 :end @endlocal & set ERROR_CODE=%ERROR_CODE% if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost @REM check for post script, once with legacy .bat ending and once with .cmd ending if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat" if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd" :skipRcPost @REM pause the script if MAVEN_BATCH_PAUSE is set to 'on' if "%MAVEN_BATCH_PAUSE%" == "on" pause if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE% exit /B %ERROR_CODE% ================================================ FILE: spring-cloud-task-samples/multiple-datasources/pom.xml ================================================ 4.0.0 io.spring.cloud multiple-datasources jar 5.0.1-SNAPSHOT To show users how to enable a task with a multiple DataSources. 
Spring Cloud Task Multiple DataSources Application org.springframework.boot spring-boot-starter-parent 4.0.2 5.0.2-SNAPSHOT UTF-8 17 org.springframework.cloud spring-cloud-task-dependencies ${project.version} pom import org.springframework.cloud spring-cloud-starter-task org.springframework.boot spring-boot-starter-jdbc com.h2database h2 org.mariadb.jdbc mariadb-java-client org.springframework.boot spring-boot-starter-test test org.springframework.cloud spring-cloud-test-support ${spring-cloud-commons.version} test spring-snapshots Spring Snapshots https://repo.spring.io/snapshot true spring-milestones Spring Milestones https://repo.spring.io/milestone false spring-releases Spring Releases https://repo.spring.io/release false spring-snapshots Spring Snapshots https://repo.spring.io/snapshot true spring-milestones Spring Milestones https://repo.spring.io/milestone false org.springframework.boot spring-boot-maven-plugin org.apache.maven.plugins maven-javadoc-plugin attach-javadocs jar org.apache.maven.plugins maven-source-plugin attach-sources jar maven-deploy-plugin true ================================================ FILE: spring-cloud-task-samples/multiple-datasources/src/main/java/io/spring/MultipleDataSourcesApplication.java ================================================ /* * Copyright 2018-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package io.spring; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.cloud.task.configuration.EnableTask; /** * @author Michael Minella */ @EnableTask @SpringBootApplication public class MultipleDataSourcesApplication { public static void main(String[] args) { SpringApplication.run(MultipleDataSourcesApplication.class, args); } } ================================================ FILE: spring-cloud-task-samples/multiple-datasources/src/main/java/io/spring/configuration/CustomTaskConfigurer.java ================================================ /* * Copyright 2018-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package io.spring.configuration; import javax.sql.DataSource; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.cloud.task.configuration.DefaultTaskConfigurer; import org.springframework.stereotype.Component; /** * @author Michael Minella */ @Component public class CustomTaskConfigurer extends DefaultTaskConfigurer { @Autowired public CustomTaskConfigurer(@Qualifier("secondDataSource") DataSource dataSource) { super(dataSource); } } ================================================ FILE: spring-cloud-task-samples/multiple-datasources/src/main/java/io/spring/configuration/EmbeddedDataSourceConfiguration.java ================================================ /* * Copyright 2018-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.spring.configuration; import javax.sql.DataSource; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Profile; import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType; /** * Creates two data sources that use embedded databases. 
* * @author Michael Minella * @author Glenn Renfro */ @Configuration(proxyBeanMethods = false) @Profile("embedded") public class EmbeddedDataSourceConfiguration { @Bean public DataSource dataSource() { return new EmbeddedDatabaseBuilder().setType(EmbeddedDatabaseType.H2).build(); } @Bean public DataSource secondDataSource() { return new EmbeddedDatabaseBuilder().setType(EmbeddedDatabaseType.H2).build(); } } ================================================ FILE: spring-cloud-task-samples/multiple-datasources/src/main/java/io/spring/configuration/ExternalDataSourceConfiguration.java ================================================ /* * Copyright 2018-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package io.spring.configuration; import javax.sql.DataSource; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.boot.context.properties.ConfigurationProperties; import org.springframework.boot.jdbc.autoconfigure.DataSourceProperties; import org.springframework.boot.jdbc.DataSourceBuilder; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Primary; import org.springframework.context.annotation.Profile; import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType; /** * Creates two data sources that use external databases. * * @author Glenn Renfro */ @Configuration(proxyBeanMethods = false) @Profile("external") public class ExternalDataSourceConfiguration { @Bean(name = "springDataSourceProperties") @ConfigurationProperties("spring.datasource") @Primary public DataSourceProperties springDataSourceProperties() { return new DataSourceProperties(); } @Bean(name = "secondDataSourceProperties") @ConfigurationProperties("second.datasource") public DataSourceProperties myDataSourceProperties() { return new DataSourceProperties(); } @Bean(name = "springDataSource") @Primary public DataSource dataSource( @Qualifier("springDataSourceProperties") DataSourceProperties springDataSourceProperties) { return DataSourceBuilder.create() .driverClassName(springDataSourceProperties.getDriverClassName()) .url(springDataSourceProperties.getUrl()) .password(springDataSourceProperties.getPassword()) .username(springDataSourceProperties.getUsername()) .build(); } @Bean public DataSource secondDataSource( @Qualifier("secondDataSourceProperties") DataSourceProperties secondDataSourceProperties) { return DataSourceBuilder.create() .driverClassName(secondDataSourceProperties.getDriverClassName()) .url(secondDataSourceProperties.getUrl()) 
.password(secondDataSourceProperties.getPassword()) .username(secondDataSourceProperties.getUsername()) .build(); } } ================================================ FILE: spring-cloud-task-samples/multiple-datasources/src/main/java/io/spring/configuration/package-info.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Configuration classes for the multiple datasources sample application. */ package io.spring.configuration; ================================================ FILE: spring-cloud-task-samples/multiple-datasources/src/main/java/io/spring/package-info.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Multiple datasources sample application for Spring Cloud Task. 
*/ package io.spring; ================================================ FILE: spring-cloud-task-samples/multiple-datasources/src/main/java/io/spring/task/SampleCommandLineRunner.java ================================================ /* * Copyright 2018-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.spring.task; import java.util.List; import javax.sql.DataSource; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.CommandLineRunner; import org.springframework.stereotype.Component; /** * @author Michael Minella */ @Component public class SampleCommandLineRunner implements CommandLineRunner { private List dataSources; @Autowired public SampleCommandLineRunner(List dataSources) { this.dataSources = dataSources; } @Override public void run(String... args) throws Exception { System.out.println("There are " + this.dataSources.size() + " DataSources within this application"); } } ================================================ FILE: spring-cloud-task-samples/multiple-datasources/src/main/java/io/spring/task/package-info.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Task implementation classes for the multiple datasources sample application. */ package io.spring.task; ================================================ FILE: spring-cloud-task-samples/multiple-datasources/src/main/resources/application.properties ================================================ spring.application.name=Demo Multiple DataSources Task logging.level.org.springframework.cloud.task=DEBUG spring.profiles.active=embedded ================================================ FILE: spring-cloud-task-samples/multiple-datasources/src/test/java/io/spring/MultiDataSourcesApplicationTests.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package io.spring; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.boot.SpringApplication; import org.springframework.boot.test.system.CapturedOutput; import org.springframework.boot.test.system.OutputCaptureExtension; import static org.assertj.core.api.Assertions.assertThat; /** * @author Michael Minella */ @ExtendWith(OutputCaptureExtension.class) public class MultiDataSourcesApplicationTests { @Test public void testTimeStampApp(CapturedOutput capturedOutput) throws Exception { SpringApplication.run(MultipleDataSourcesApplication.class, "--spring.profiles.active=embedded"); String output = capturedOutput.toString(); assertThat(output.contains("There are 2 DataSources within this application")) .as("Unable to find CommandLineRunner output: " + output) .isTrue(); assertThat(output.contains("Creating: TaskExecution{")).as("Unable to find start task message: " + output) .isTrue(); assertThat(output.contains("Updating: TaskExecution")).as("Unable to find update task message: " + output) .isTrue(); } } ================================================ FILE: spring-cloud-task-samples/multiple-datasources/src/test/java/io/spring/MultiDataSourcesExternalApplicationTests.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package io.spring; import java.sql.SQLException; import org.h2.tools.Server; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.boot.SpringApplication; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.boot.test.system.CapturedOutput; import org.springframework.boot.test.system.OutputCaptureExtension; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.test.context.junit.jupiter.SpringExtension; import org.springframework.test.util.TestSocketUtils; import static org.assertj.core.api.Assertions.assertThat; /** * Verifies that the multiple-datasources sample starts against two external (H2 TCP) databases, * each served on its own randomly chosen port. * * @author Glenn Renfro */ @ExtendWith({ OutputCaptureExtension.class, SpringExtension.class }) @SpringBootTest(classes = { MultiDataSourcesExternalApplicationTests.TaskLauncherConfiguration.class }) public class MultiDataSourcesExternalApplicationTests { private final static String DATASOURCE_URL; private final static String SECOND_DATASOURCE_URL; private final static String DATASOURCE_USER_NAME = "SA"; private final static String DATASOURCE_USER_PASSWORD = "''"; private final static String DATASOURCE_DRIVER_CLASS_NAME = "org.h2.Driver"; private static int randomPort; private static int secondRandomPort; static { randomPort = TestSocketUtils.findAvailableTcpPort(); DATASOURCE_URL = "jdbc:h2:tcp://localhost:" + randomPort + "/mem:dataflow;DB_CLOSE_DELAY=-1;" + "DB_CLOSE_ON_EXIT=FALSE"; secondRandomPort = TestSocketUtils.findAvailableTcpPort(); /* bug fix: must point at the second H2 server (secondRandomPort), not the first one */ SECOND_DATASOURCE_URL = "jdbc:h2:tcp://localhost:" + secondRandomPort + "/mem:dataflow;DB_CLOSE_DELAY=-1;" + "DB_CLOSE_ON_EXIT=FALSE"; } @Test public void testTimeStampApp(CapturedOutput capturedOutput) throws Exception { SpringApplication.run(MultipleDataSourcesApplication.class, "--spring.profiles.active=external", "--spring.datasource.url=" + DATASOURCE_URL, "--spring.datasource.username=" + DATASOURCE_USER_NAME, 
"--spring.datasource.password=" + DATASOURCE_USER_PASSWORD, "--spring.datasource.driverClassName=" + DATASOURCE_DRIVER_CLASS_NAME, "--second.datasource.url=" + SECOND_DATASOURCE_URL, "--second.datasource.username=" + DATASOURCE_USER_NAME, "--second.datasource.password=" + DATASOURCE_USER_PASSWORD, "--second.datasource.driverClassName=" + DATASOURCE_DRIVER_CLASS_NAME); String output = capturedOutput.toString(); assertThat(output.contains("There are 2 DataSources within this application")) .as("Unable to find CommandLineRunner output: " + output) .isTrue(); assertThat(output.contains("Creating: TaskExecution{")).as("Unable to find start task message: " + output) .isTrue(); assertThat(output.contains("Updating: TaskExecution")).as("Unable to find update task message: " + output) .isTrue(); } @Configuration(proxyBeanMethods = false) public static class TaskLauncherConfiguration { private static Server defaultServer; private static Server secondServer; @Bean public Server initH2TCPServer() { Server server = null; try { if (defaultServer == null) { server = Server .createTcpServer("-ifNotExists", "-tcp", "-tcpAllowOthers", "-tcpPort", String.valueOf(randomPort)) .start(); defaultServer = server; } } catch (SQLException e) { throw new IllegalStateException(e); } return defaultServer; } @Bean public Server initSecondH2TCPServer() { Server server = null; try { if (secondServer == null) { server = Server .createTcpServer("-ifNotExists", "-tcp", "-tcpAllowOthers", "-tcpPort", String.valueOf(secondRandomPort)) .start(); secondServer = server; } } catch (SQLException e) { throw new IllegalStateException(e); } return secondServer; } } } ================================================ FILE: spring-cloud-task-samples/multiple-datasources/src/test/resources/application.properties ================================================ # # Copyright 2016-2019 the original author or authors. 
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # spring.application.name=Demo Multiple DataSources Task logging.level.org.springframework.cloud.task=DEBUG ================================================ FILE: spring-cloud-task-samples/pom.xml ================================================ 4.0.0 spring-cloud-task-samples pom Spring Cloud Task Samples org.springframework.cloud spring-cloud-task-parent 5.0.2-SNAPSHOT UTF-8 17 timestamp batch-job task-events batch-events jpa-sample task-observations multiple-datasources single-step-batch-job org.springframework.cloud spring-cloud-task-dependencies ${project.version} pom import org.apache.maven.plugins maven-javadoc-plugin attach-javadocs jar org.apache.maven.plugins maven-source-plugin attach-sources jar maven-deploy-plugin true ================================================ FILE: spring-cloud-task-samples/single-step-batch-job/.mvn/wrapper/maven-wrapper.properties ================================================ distributionUrl=https://repo1.maven.org/maven2/org/apache/maven/apache-maven/3.6.0/apache-maven-3.6.0-bin.zip ================================================ FILE: spring-cloud-task-samples/single-step-batch-job/README.adoc ================================================ = Single Step Batch Job This is a Spring Cloud Task application that autoconfigures a single step Spring Batch job based on the profiles that are active. 
The profiles that are available are:

* `ffreader` - Activates a FlatFileItemReader that reads from the `test.txt` file provided.
* `ffwriter` - Activates a FlatFileItemWriter that writes to the `result.txt` file.
* `jdbcreader` - Activates a JdbcCursorItemReader that reads from the `item_sample` table.
* `jdbcwriter` - Activates a JdbcItemWriter that writes to the `item` table.
* `amqpreader` - Activates an AmqpItemReader that reads from the `samplequeue` queue.
* `amqpwriter` - Activates an AmqpItemWriter that writes to the `sampleexchange` exchange.
* `kafkareader` - Activates a KafkaItemReader that reads from the `sampletopic` topic.
* `kafkawriter` - Activates a KafkaItemWriter that writes to the `sampletopic` topic.

== Requirements:

* Java 17 or Above

== Classes:

* SingleStepBatchJobApplication - the Spring Boot Main Application

== Build:

[source,shell]
----
mvn clean package
----

== Run:

[source,shell]
----
java -jar target/single-step-batch-job-5.0.0-SNAPSHOT.jar --spring.config.name=
----

== Examples

=== FlatFileItemReader with a FlatFileItemWriter batch job

In this example the batch job will read from the test.txt file from the resources directory and write a `result.txt` file to the root directory of the project.

```
java -Dspring.profiles.active=ffreader,ffwriter -jar target/single-step-batch-job-5.0.0-SNAPSHOT.jar
```

=== FlatFileItemReader with a JdbcItemWriter batch job

In this example the batch job will read from the test.txt file from the resources directory and write the result to the `item` table in your data store.
``` java -Dspring.profiles.active=ffreader,jdbcwriter -jar target/single-step-batch-job-5.0.0-SNAPSHOT.jar ``` Before running create the following table: ``` CREATE TABLE IF NOT EXISTS item ( item_name varchar(55) ); ``` === JdbcCursorItemReader with a JdbcItemWriter batch job In this example the batch job will read from the `item_sample` table in your data store (as specified in the default `DataSource` properties) and write the result to the `item` table in your data store (as specified in the default `DataSource` properties). ``` java -Dspring.profiles.active=jdbcreader,jdbcwriter -jar target/single-step-batch-job-5.0.0-SNAPSHOT.jar ``` Before running create the following tables: ``` CREATE TABLE IF NOT EXISTS item ( item_name varchar(55) ); CREATE TABLE IF NOT EXISTS item_sample ( ITEM_NAME varchar(55) ); INSERT INTO item_sample (item_name) VALUES ('foo'); INSERT INTO item_sample (item_name) VALUES ('bar'); INSERT INTO item_sample (item_name) VALUES ('baz'); INSERT INTO item_sample (item_name) VALUES ('boo'); INSERT INTO item_sample (item_name) VALUES ('qux'); INSERT INTO item_sample (item_name) VALUES ('Job'); ``` You may also wish to read from and write to data sources different from the default `DataSource`. 
This can be done specifying datasources for the reader and writer as follows: ``` # Jdbc Cursor Item Reader Data Source export jdbccursoritemreader_datasource_url= export jdbccursoritemreader_datasource_username= export jdbccursoritemreader_datasource_password= export jdbccursoritemreader_datasource_driverClassName= export spring_batch_job_jdbccursoritemreader_datasource_enable=true # Jdbc Batch Item Writer Data Source export jdbcbatchitemwriter_datasource_url= export jdbcbatchitemwriter_datasource_username= export jdbcbatchitemwriter_datasource_password= export jdbcbatchitemwriter_datasource_driverClassName= export spring_batch_job_jdbcbatchitemwriter_datasource_enable=true ``` === JdbcCursorItemReader with FlatfileItemWriter batch job In this example the batch job will read from the `item_sample` table in your data store and write the result to the `result.txt` file to the root directory of the project. ``` java -Dspring.profiles.active=jdbcreader,ffwriter -jar target/single-step-batch-job-5.0.0-SNAPSHOT.jar ``` Before running create the following table: ``` CREATE TABLE IF NOT EXISTS item_sample ( ITEM_NAME varchar(55) ); INSERT INTO item_sample (item_name) VALUES ('foo'); INSERT INTO item_sample (item_name) VALUES ('bar'); INSERT INTO item_sample (item_name) VALUES ('baz'); INSERT INTO item_sample (item_name) VALUES ('boo'); INSERT INTO item_sample (item_name) VALUES ('qux'); INSERT INTO item_sample (item_name) VALUES ('Job'); ``` === FlatfileItemReader with AmqpItemWriter batch job In this example the batch job will read from the `test.txt` file and write the result to the `sampleexchange` exchange. ``` java -Dspring.profiles.active=ffreader,amqpwriter -jar target/single-step-batch-job-5.0.0-SNAPSHOT.jar ``` NOTE: Before running create an exchange named `sampleexchange`. === AmqpItemReader with FlatfileItemWriter batch job In this example the batch job will read from the `samplequeue` queue and write the result to the `result.txt` in the current directory. 
``` java -Dspring.profiles.active=amqpreader,ffwriter -jar target/single-step-batch-job-5.0.0-SNAPSHOT.jar ``` NOTE: Before running create and populate a queue named `samplequeue`. === FlatfileItemReader with KafkaItemWriter batch job In this example the batch job will read from the `test.txt` file and write the result to the `sampletopic` topic. ``` java -Dspring.profiles.active=ffreader,kafkawriter -jar target/single-step-batch-job-5.0.0-SNAPSHOT.jar ``` Before running create a topic named `sampletopic`. For example: ``` kafka-topics.sh --create --topic sampletopic --bootstrap-server localhost:9092 ``` === KafkaItemReader with FlatfileItemWriter batch job In this example the batch job will read from the `sampletopic` topic and write the result to the `result.txt` in the current directory. ``` java -Dspring.profiles.active=kafkareader,ffwriter -jar target/single-step-batch-job-5.0.0-SNAPSHOT.jar ``` Before running populate the topic named `sampletopic`. For example populate it using the FlatfileItemReader and KafkaItemWriter from above: ``` java -Dspring.profiles.active=ffreader,kafkawriter -jar target/single-step-batch-job-5.0.0-SNAPSHOT.jar ``` ================================================ FILE: spring-cloud-task-samples/single-step-batch-job/mvnw ================================================ #!/bin/sh # ---------------------------------------------------------------------------- # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. 
You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # ---------------------------------------------------------------------------- # ---------------------------------------------------------------------------- # Maven2 Start Up Batch script # # Required ENV vars: # ------------------ # JAVA_HOME - location of a JDK home dir # # Optional ENV vars # ----------------- # M2_HOME - location of maven2's installed home dir # MAVEN_OPTS - parameters passed to the Java VM when running Maven # e.g. to debug Maven itself, use # set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 # MAVEN_SKIP_RC - flag to disable loading of mavenrc files # ---------------------------------------------------------------------------- if [ -z "$MAVEN_SKIP_RC" ] ; then if [ -f /etc/mavenrc ] ; then . /etc/mavenrc fi if [ -f "$HOME/.mavenrc" ] ; then . "$HOME/.mavenrc" fi fi # OS specific support. $var _must_ be set to either true or false. cygwin=false; darwin=false; mingw=false case "`uname`" in CYGWIN*) cygwin=true ;; MINGW*) mingw=true;; Darwin*) darwin=true # # Look for the Apple JDKs first to preserve the existing behaviour, and then look # for the new JDKs provided by Oracle. 
# if [ -z "$JAVA_HOME" ] && [ -L /System/Library/Frameworks/JavaVM.framework/Versions/CurrentJDK ] ; then # # Apple JDKs # export JAVA_HOME=/System/Library/Frameworks/JavaVM.framework/Versions/CurrentJDK/Home fi if [ -z "$JAVA_HOME" ] && [ -L /System/Library/Java/JavaVirtualMachines/CurrentJDK ] ; then # # Apple JDKs # export JAVA_HOME=/System/Library/Java/JavaVirtualMachines/CurrentJDK/Contents/Home fi if [ -z "$JAVA_HOME" ] && [ -L "/Library/Java/JavaVirtualMachines/CurrentJDK" ] ; then # # Oracle JDKs # export JAVA_HOME=/Library/Java/JavaVirtualMachines/CurrentJDK/Contents/Home fi if [ -z "$JAVA_HOME" ] && [ -x "/usr/libexec/java_home" ]; then # # Apple JDKs # export JAVA_HOME=`/usr/libexec/java_home` fi ;; esac if [ -z "$JAVA_HOME" ] ; then if [ -r /etc/gentoo-release ] ; then JAVA_HOME=`java-config --jre-home` fi fi if [ -z "$M2_HOME" ] ; then ## resolve links - $0 may be a link to maven's home PRG="$0" # need this for relative symlinks while [ -h "$PRG" ] ; do ls=`ls -ld "$PRG"` link=`expr "$ls" : '.*-> \(.*\)$'` if expr "$link" : '/.*' > /dev/null; then PRG="$link" else PRG="`dirname "$PRG"`/$link" fi done saveddir=`pwd` M2_HOME=`dirname "$PRG"`/.. # make it fully qualified M2_HOME=`cd "$M2_HOME" && pwd` cd "$saveddir" # echo Using m2 at $M2_HOME fi # For Cygwin, ensure paths are in UNIX format before anything is touched if $cygwin ; then [ -n "$M2_HOME" ] && M2_HOME=`cygpath --unix "$M2_HOME"` [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --unix "$JAVA_HOME"` [ -n "$CLASSPATH" ] && CLASSPATH=`cygpath --path --unix "$CLASSPATH"` fi # For Migwn, ensure paths are in UNIX format before anything is touched if $mingw ; then [ -n "$M2_HOME" ] && M2_HOME="`(cd "$M2_HOME"; pwd)`" [ -n "$JAVA_HOME" ] && JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`" # TODO classpath? fi if [ -z "$JAVA_HOME" ]; then javaExecutable="`which javac`" if [ -n "$javaExecutable" ] && ! [ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then # readlink(1) is not available as standard on Solaris 10. 
readLink=`which readlink` if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then if $darwin ; then javaHome="`dirname \"$javaExecutable\"`" javaExecutable="`cd \"$javaHome\" && pwd -P`/javac" else javaExecutable="`readlink -f \"$javaExecutable\"`" fi javaHome="`dirname \"$javaExecutable\"`" javaHome=`expr "$javaHome" : '\(.*\)/bin'` JAVA_HOME="$javaHome" export JAVA_HOME fi fi fi if [ -z "$JAVACMD" ] ; then if [ -n "$JAVA_HOME" ] ; then if [ -x "$JAVA_HOME/jre/sh/java" ] ; then # IBM's JDK on AIX uses strange locations for the executables JAVACMD="$JAVA_HOME/jre/sh/java" else JAVACMD="$JAVA_HOME/bin/java" fi else JAVACMD="`which java`" fi fi if [ ! -x "$JAVACMD" ] ; then echo "Error: JAVA_HOME is not defined correctly." >&2 echo " We cannot execute $JAVACMD" >&2 exit 1 fi if [ -z "$JAVA_HOME" ] ; then echo "Warning: JAVA_HOME environment variable is not set." fi CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher # For Cygwin, switch paths to Windows format before running java if $cygwin; then [ -n "$M2_HOME" ] && M2_HOME=`cygpath --path --windows "$M2_HOME"` [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"` [ -n "$CLASSPATH" ] && CLASSPATH=`cygpath --path --windows "$CLASSPATH"` fi # traverses directory structure from process work directory to filesystem root # first directory with .mvn subdirectory is considered project base directory find_maven_basedir() { local basedir=$(pwd) local wdir=$(pwd) while [ "$wdir" != '/' ] ; do if [ -d "$wdir"/.mvn ] ; then basedir=$wdir break fi wdir=$(cd "$wdir/.."; pwd) done echo "${basedir}" } # concatenates all lines of a file concat_lines() { if [ -f "$1" ]; then echo "$(tr -s '\n' ' ' < "$1")" fi } export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-$(find_maven_basedir)} MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS" # Provide a "standardized" way to retrieve the CLI args that will # work with both Windows and non-Windows executions. 
MAVEN_CMD_LINE_ARGS="$MAVEN_CONFIG $@" export MAVEN_CMD_LINE_ARGS WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain exec "$JAVACMD" \ $MAVEN_OPTS \ -classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \ "-Dmaven.home=${M2_HOME}" "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \ ${WRAPPER_LAUNCHER} "$@" ================================================ FILE: spring-cloud-task-samples/single-step-batch-job/pom.xml ================================================ 4.0.0 io.spring.cloud single-step-batch-job jar Single Step Batch Job Task 5.0.0-SNAPSHOT Spring Cloud Single Step Batch Job Task org.springframework.boot spring-boot-starter-parent 4.0.2 5.0.2-SNAPSHOT true org.springframework.cloud spring-cloud-task-dependencies ${project.version} pom import org.springframework.boot spring-boot-starter-test test org.springframework.cloud spring-cloud-starter-task ${project.version} org.springframework.boot spring-boot-configuration-processor org.springframework.boot spring-boot-starter-jdbc com.h2database h2 org.mariadb.jdbc mariadb-java-client org.springframework.boot spring-boot-starter-batch org.springframework.cloud spring-cloud-starter-single-step-batch-job 5.0.0-SNAPSHOT org.springframework.batch spring-batch-test org.springframework.cloud spring-cloud-test-support ${spring-cloud-commons.version} test spring-snapshots Spring Snapshots https://repo.spring.io/snapshot true spring-milestones Spring Milestones https://repo.spring.io/milestone false spring-releases Spring Releases https://repo.spring.io/release false spring-snapshots Spring Snapshots https://repo.spring.io/snapshot true spring-milestones Spring Milestones https://repo.spring.io/milestone false spring-releases Spring Releases https://repo.spring.io/release false org.springframework.boot spring-boot-maven-plugin org.apache.maven.plugins maven-javadoc-plugin attach-javadocs jar org.apache.maven.plugins maven-source-plugin attach-sources jar maven-deploy-plugin true 
================================================
FILE: spring-cloud-task-samples/single-step-batch-job/src/main/java/io/spring/SingleStepBatchJobApplication.java
================================================
/*
 * Copyright 2020-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.spring;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

import org.springframework.cloud.task.configuration.EnableTask;

/**
 * Spring Cloud Task application whose single-step Spring Batch job (reader and
 * writer) is assembled by auto-configuration according to the active profiles
 * (see the sample's README and the profile-specific property files).
 */
@EnableTask
@SpringBootApplication
public class SingleStepBatchJobApplication {

	public static void main(String[] args) {
		SpringApplication.run(SingleStepBatchJobApplication.class, args);
	}

}

================================================
FILE: spring-cloud-task-samples/single-step-batch-job/src/main/java/io/spring/package-info.java
================================================
/*
 * Copyright 2015-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and * limitations under the License. */ /** * Single-step batch job sample application for Spring Cloud Task. */ package io.spring; ================================================ FILE: spring-cloud-task-samples/single-step-batch-job/src/main/resources/application-amqpreader.properties ================================================ spring.batch.job.amqpitemreader.name=testreader spring.cloud.task.closecontextEnabled=true spring.batch.job.amqpitemreader.enabled=true spring.rabbitmq.template.defaultReceiveQueue=samplequeue spring.rabbitmq.host=localhost spring.rabbitmq.port=5672 ================================================ FILE: spring-cloud-task-samples/single-step-batch-job/src/main/resources/application-amqpwriter.properties ================================================ spring.batch.job.amqpitemwriter.enabled=true spring.cloud.task.closecontextEnabled=true spring.rabbitmq.template.exchange=sampleexchange ================================================ FILE: spring-cloud-task-samples/single-step-batch-job/src/main/resources/application-ffreader.properties ================================================ spring.batch.job.flatfileitemreader.savestate=true spring.batch.job.flatfileitemreader.name=fixedWidthConfiguration spring.batch.job.flatfileitemreader.comments=#,$ spring.batch.job.flatfileitemreader.resource=/test.txt spring.batch.job.flatfileitemreader.strict=true spring.batch.job.flatfileitemreader.fixedLength=true spring.batch.job.flatfileitemreader.ranges=1-3 spring.batch.job.flatfileitemreader.names=ITEM_NAME spring.batch.job.flatfileitemreader.parsingStrict=false ================================================ FILE: spring-cloud-task-samples/single-step-batch-job/src/main/resources/application-ffwriter.properties ================================================ spring.batch.job.flatfileitemwriter.name=fooWriter spring.batch.job.flatfileitemwriter.resource=file:result.txt 
spring.batch.job.flatfileitemwriter.encoding=UTF-16 spring.batch.job.flatfileitemwriter.saveState=false spring.batch.job.flatfileitemwriter.shouldDeleteIfEmpty=true spring.batch.job.flatfileitemwriter.delimited=true spring.batch.job.flatfileitemwriter.names=ITEM_NAME spring.batch.job.flatfileitemwriter.append=true spring.batch.job.flatfileitemwriter.forceSync=true spring.batch.job.flatfileitemwriter.shouldDeleteIfExists=false spring.batch.job.flatfileitemwriter.transactional=false ================================================ FILE: spring-cloud-task-samples/single-step-batch-job/src/main/resources/application-jdbcreader.properties ================================================ spring.batch.job.jdbccursoritemreader.name=fooReader spring.batch.job.jdbccursoritemreader.sql=select item_name from item_sample order by item_name ================================================ FILE: spring-cloud-task-samples/single-step-batch-job/src/main/resources/application-jdbcwriter.properties ================================================ spring.batch.job.jdbcbatchitemwriter.name=jdbcWriter spring.batch.job.jdbcbatchitemwriter.sql=INSERT INTO item (item_name) VALUES (:ITEM_NAME) ================================================ FILE: spring-cloud-task-samples/single-step-batch-job/src/main/resources/application-kafkareader.properties ================================================ spring.kafka.consumer.bootstrapServers=localhost:9092 spring.batch.job.kafkaitemreader.name=testreader spring.kafka.consumer.groupId=1 spring.kafka.consumer.bootstrapServers=localhost:9092 spring.kafka.consumer.valueDeserializer=org.springframework.kafka.support.serializer.JsonDeserializer spring.kafka.consumer.keyDeserializer=org.springframework.kafka.support.serializer.JsonDeserializer spring.batch.job.kafkaitemreader.topic=sampletopic spring.batch.job.kafkaitemreader.pollTimeOutInSeconds=2 ================================================ FILE: 
spring-cloud-task-samples/single-step-batch-job/src/main/resources/application-kafkawriter.properties ================================================ spring.kafka.producer.bootstrapServers=localhost:9092 spring.kafka.producer.keySerializer=org.springframework.kafka.support.serializer.JsonSerializer spring.kafka.producer.valueSerializer=org.springframework.kafka.support.serializer.JsonSerializer spring.batch.job.kafkaitemwriter.topic=sampletopic ================================================ FILE: spring-cloud-task-samples/single-step-batch-job/src/main/resources/application.properties ================================================ spring.application.name=Single Step Batch Job spring.batch.job.jobName=job spring.batch.job.stepName=step1 spring.batch.job.chunkSize=5 spring.batch.jdbc.initialize-schema=always logging.level.org.springframework.cloud.task=debug ================================================ FILE: spring-cloud-task-samples/single-step-batch-job/src/main/resources/test.txt ================================================ foo bar baz qux boo Job ================================================ FILE: spring-cloud-task-samples/single-step-batch-job/src/test/java/io/spring/BatchJobApplicationTests.java ================================================ /* * Copyright 2020-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package io.spring; import java.io.File; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Paths; import java.sql.SQLException; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.sql.DataSource; import org.assertj.core.api.Assertions; import org.h2.tools.Server; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.springframework.boot.SpringApplication; import org.springframework.boot.jdbc.DataSourceBuilder; import org.springframework.core.io.ClassPathResource; import org.springframework.core.io.FileSystemResource; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.jdbc.datasource.DriverManagerDataSource; import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator; import org.springframework.test.util.TestSocketUtils; import static org.assertj.core.api.Assertions.assertThat; /** * Verifies that the SingleStepBatch Job for various scenarios are created properly. 
* * @author Glenn Renfro */ public class BatchJobApplicationTests { private final static String DATASOURCE_URL; private final static String DATASOURCE_USER_NAME = "SA"; private final static String DATASOURCE_USER_PASSWORD = "''"; private final static String DATASOURCE_DRIVER_CLASS_NAME = "org.h2.Driver"; private static int randomPort; private static Server defaultServer; static { randomPort = TestSocketUtils.findAvailableTcpPort(); DATASOURCE_URL = "jdbc:h2:tcp://localhost:" + randomPort + "/mem:dataflow;DB_CLOSE_DELAY=-1;" + "DB_CLOSE_ON_EXIT=FALSE"; } private File outputFile; @BeforeEach public void setup() throws Exception { outputFile = new File("result.txt"); initH2TCPServer(); } @AfterEach public void tearDown() throws Exception { Files.deleteIfExists(Paths.get(outputFile.getAbsolutePath())); DriverManagerDataSource dataSource = new DriverManagerDataSource(); dataSource.setDriverClassName(DATASOURCE_DRIVER_CLASS_NAME); dataSource.setUrl(DATASOURCE_URL); dataSource.setUsername(DATASOURCE_USER_NAME); dataSource.setPassword(DATASOURCE_USER_PASSWORD); JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource); jdbcTemplate.execute("TRUNCATE TABLE item"); } @Test public void testFileReaderJdbcWriter() throws Exception { getSpringApplication().run(SingleStepBatchJobApplication.class, "--spring.profiles.active=ffreader,jdbcwriter", "--spring.datasource.username=" + DATASOURCE_USER_NAME, "--spring.datasource.url=" + DATASOURCE_URL, "--spring.datasource.driver-class-name=" + DATASOURCE_DRIVER_CLASS_NAME, "--spring.datasource.password=" + DATASOURCE_USER_PASSWORD, "foo=testFileReaderJdbcWriter"); validateDBResult(); } @Test public void testJdbcReaderJdbcWriter() throws Exception { getSpringApplication().run(SingleStepBatchJobApplication.class, "--spring.profiles.active=jdbcreader,jdbcwriter", "--spring.datasource.username=" + DATASOURCE_USER_NAME, "--spring.datasource.url=" + DATASOURCE_URL, "--spring.datasource.driver-class-name=" + DATASOURCE_DRIVER_CLASS_NAME, 
"--spring.datasource.password=" + DATASOURCE_USER_PASSWORD, "foo=testJdbcReaderJdbcWriter"); validateDBResult(); } @Test public void testJdbcReaderFlatfileWriter() throws Exception { getSpringApplication().run(SingleStepBatchJobApplication.class, "--spring.profiles.active=jdbcreader,ffwriter", "--spring.datasource.username=" + DATASOURCE_USER_NAME, "--spring.datasource.url=" + DATASOURCE_URL, "--spring.datasource.driver-class-name=" + DATASOURCE_DRIVER_CLASS_NAME, "--spring.datasource.password=" + DATASOURCE_USER_PASSWORD, "foo=testJdbcReaderFlatfileWriter"); validateFileResult(); } @Test public void testFileReaderFileWriter() throws Exception { getSpringApplication().run(SingleStepBatchJobApplication.class, "--spring.profiles.active=ffreader,ffwriter", "foo=testFileReaderFileWriter"); validateFileResult(); } public Server initH2TCPServer() throws SQLException { Server server; if (defaultServer == null) { server = Server .createTcpServer("-ifNotExists", "-tcp", "-tcpAllowOthers", "-tcpPort", String.valueOf(randomPort)) .start(); defaultServer = server; DriverManagerDataSource dataSource = new DriverManagerDataSource(); dataSource.setDriverClassName(DATASOURCE_DRIVER_CLASS_NAME); dataSource.setUrl(DATASOURCE_URL); dataSource.setUsername(DATASOURCE_USER_NAME); dataSource.setPassword(DATASOURCE_USER_PASSWORD); ClassPathResource setupResource = new ClassPathResource("schema-h2.sql"); ResourceDatabasePopulator resourceDatabasePopulator = new ResourceDatabasePopulator(setupResource); resourceDatabasePopulator.execute(dataSource); } return defaultServer; } private void validateFileResult() throws Exception { assertThat(Assertions.linesOf(this.outputFile, StandardCharsets.UTF_16).size()).isEqualTo(6); assertThat(Assertions.contentOf((new ClassPathResource("testresult.txt")).getFile()) .equals(new FileSystemResource(this.outputFile))); } private void validateDBResult() { DataSource dataSource = getDataSource(); JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource); List> 
result = jdbcTemplate.queryForList("SELECT item_name FROM item ORDER BY item_name"); assertThat(result.size()).isEqualTo(6); assertThat(result.get(0).get("item_name")).isEqualTo("Job"); assertThat(result.get(1).get("item_name")).isEqualTo("bar"); assertThat(result.get(2).get("item_name")).isEqualTo("baz"); assertThat(result.get(3).get("item_name")).isEqualTo("boo"); assertThat(result.get(4).get("item_name")).isEqualTo("foo"); assertThat(result.get(5).get("item_name")).isEqualTo("qux"); } private DataSource getDataSource() { DataSourceBuilder dataSourceBuilder = DataSourceBuilder.create(); dataSourceBuilder.driverClassName(DATASOURCE_DRIVER_CLASS_NAME); dataSourceBuilder.url(DATASOURCE_URL); dataSourceBuilder.username(DATASOURCE_USER_NAME); dataSourceBuilder.password(DATASOURCE_USER_PASSWORD); return dataSourceBuilder.build(); } private SpringApplication getSpringApplication() { SpringApplication springApplication = new SpringApplication(); Map properties = new HashMap<>(); properties.put("spring.application.name", "Single Step Batch Job"); properties.put("spring.batch.job.jobName", "job"); properties.put("spring.batch.job.stepName", "step1"); properties.put("spring.batch.job.chunkSize", "5"); springApplication.setDefaultProperties(properties); return springApplication; } } ================================================ FILE: spring-cloud-task-samples/single-step-batch-job/src/test/resources/schema-h2.sql ================================================ CREATE TABLE IF NOT EXISTS item ( item_name varchar(55) ); CREATE TABLE IF NOT EXISTS item_sample ( item_name varchar(55) ); INSERT INTO item_sample (item_name) VALUES ('foo'); INSERT INTO item_sample (item_name) VALUES ('bar'); INSERT INTO item_sample (item_name) VALUES ('baz'); INSERT INTO item_sample (item_name) VALUES ('boo'); INSERT INTO item_sample (item_name) VALUES ('qux'); INSERT INTO item_sample (item_name) VALUES ('Job'); ================================================ FILE: 
spring-cloud-task-samples/single-step-batch-job/src/test/resources/test.txt ================================================ Job bar baz boo foo qux ================================================ FILE: spring-cloud-task-samples/task-events/.mvn/wrapper/maven-wrapper.properties ================================================ distributionUrl=https://repo1.maven.org/maven2/org/apache/maven/apache-maven/3.6.0/apache-maven-3.6.0-bin.zip ================================================ FILE: spring-cloud-task-samples/task-events/README.adoc ================================================ = Task Events This is a task application that emits events on a channel named `task-events` == Requirements: * Java 17 or Above == Build: [source,shell] ---- ./mvnw clean install ---- == Execution: [source,shell] ---- java -jar target/task-events-5.0.0.RELEASE.jar ---- You can listen for the events on the task-events channel with a Spring Cloud Stream Sink like the https://github.com/spring-cloud/stream-applications/tree/main/applications/sink/log-sink[log sink] using the following: [source,shell] ---- java -jar /log-sink-rabbit-3.1.2.jar --server.port=9090 --spring.cloud.stream.bindings.input.destination=task-events ---- == Dependencies: The task-events sample requires an instance of RabbitMQ to be running. ================================================ FILE: spring-cloud-task-samples/task-events/mvnw ================================================ #!/bin/sh # ---------------------------------------------------------------------------- # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. 
You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # ---------------------------------------------------------------------------- # ---------------------------------------------------------------------------- # Maven2 Start Up Batch script # # Required ENV vars: # ------------------ # JAVA_HOME - location of a JDK home dir # # Optional ENV vars # ----------------- # M2_HOME - location of maven2's installed home dir # MAVEN_OPTS - parameters passed to the Java VM when running Maven # e.g. to debug Maven itself, use # set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 # MAVEN_SKIP_RC - flag to disable loading of mavenrc files # ---------------------------------------------------------------------------- if [ -z "$MAVEN_SKIP_RC" ] ; then if [ -f /etc/mavenrc ] ; then . /etc/mavenrc fi if [ -f "$HOME/.mavenrc" ] ; then . "$HOME/.mavenrc" fi fi # OS specific support. $var _must_ be set to either true or false. cygwin=false; darwin=false; mingw=false case "`uname`" in CYGWIN*) cygwin=true ;; MINGW*) mingw=true;; Darwin*) darwin=true # # Look for the Apple JDKs first to preserve the existing behaviour, and then look # for the new JDKs provided by Oracle. 
# if [ -z "$JAVA_HOME" ] && [ -L /System/Library/Frameworks/JavaVM.framework/Versions/CurrentJDK ] ; then # # Apple JDKs # export JAVA_HOME=/System/Library/Frameworks/JavaVM.framework/Versions/CurrentJDK/Home fi if [ -z "$JAVA_HOME" ] && [ -L /System/Library/Java/JavaVirtualMachines/CurrentJDK ] ; then # # Apple JDKs # export JAVA_HOME=/System/Library/Java/JavaVirtualMachines/CurrentJDK/Contents/Home fi if [ -z "$JAVA_HOME" ] && [ -L "/Library/Java/JavaVirtualMachines/CurrentJDK" ] ; then # # Oracle JDKs # export JAVA_HOME=/Library/Java/JavaVirtualMachines/CurrentJDK/Contents/Home fi if [ -z "$JAVA_HOME" ] && [ -x "/usr/libexec/java_home" ]; then # # Apple JDKs # export JAVA_HOME=`/usr/libexec/java_home` fi ;; esac if [ -z "$JAVA_HOME" ] ; then if [ -r /etc/gentoo-release ] ; then JAVA_HOME=`java-config --jre-home` fi fi if [ -z "$M2_HOME" ] ; then ## resolve links - $0 may be a link to maven's home PRG="$0" # need this for relative symlinks while [ -h "$PRG" ] ; do ls=`ls -ld "$PRG"` link=`expr "$ls" : '.*-> \(.*\)$'` if expr "$link" : '/.*' > /dev/null; then PRG="$link" else PRG="`dirname "$PRG"`/$link" fi done saveddir=`pwd` M2_HOME=`dirname "$PRG"`/.. # make it fully qualified M2_HOME=`cd "$M2_HOME" && pwd` cd "$saveddir" # echo Using m2 at $M2_HOME fi # For Cygwin, ensure paths are in UNIX format before anything is touched if $cygwin ; then [ -n "$M2_HOME" ] && M2_HOME=`cygpath --unix "$M2_HOME"` [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --unix "$JAVA_HOME"` [ -n "$CLASSPATH" ] && CLASSPATH=`cygpath --path --unix "$CLASSPATH"` fi # For Migwn, ensure paths are in UNIX format before anything is touched if $mingw ; then [ -n "$M2_HOME" ] && M2_HOME="`(cd "$M2_HOME"; pwd)`" [ -n "$JAVA_HOME" ] && JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`" # TODO classpath? fi if [ -z "$JAVA_HOME" ]; then javaExecutable="`which javac`" if [ -n "$javaExecutable" ] && ! [ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then # readlink(1) is not available as standard on Solaris 10. 
readLink=`which readlink` if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then if $darwin ; then javaHome="`dirname \"$javaExecutable\"`" javaExecutable="`cd \"$javaHome\" && pwd -P`/javac" else javaExecutable="`readlink -f \"$javaExecutable\"`" fi javaHome="`dirname \"$javaExecutable\"`" javaHome=`expr "$javaHome" : '\(.*\)/bin'` JAVA_HOME="$javaHome" export JAVA_HOME fi fi fi if [ -z "$JAVACMD" ] ; then if [ -n "$JAVA_HOME" ] ; then if [ -x "$JAVA_HOME/jre/sh/java" ] ; then # IBM's JDK on AIX uses strange locations for the executables JAVACMD="$JAVA_HOME/jre/sh/java" else JAVACMD="$JAVA_HOME/bin/java" fi else JAVACMD="`which java`" fi fi if [ ! -x "$JAVACMD" ] ; then echo "Error: JAVA_HOME is not defined correctly." >&2 echo " We cannot execute $JAVACMD" >&2 exit 1 fi if [ -z "$JAVA_HOME" ] ; then echo "Warning: JAVA_HOME environment variable is not set." fi CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher # For Cygwin, switch paths to Windows format before running java if $cygwin; then [ -n "$M2_HOME" ] && M2_HOME=`cygpath --path --windows "$M2_HOME"` [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"` [ -n "$CLASSPATH" ] && CLASSPATH=`cygpath --path --windows "$CLASSPATH"` fi # traverses directory structure from process work directory to filesystem root # first directory with .mvn subdirectory is considered project base directory find_maven_basedir() { local basedir=$(pwd) local wdir=$(pwd) while [ "$wdir" != '/' ] ; do if [ -d "$wdir"/.mvn ] ; then basedir=$wdir break fi wdir=$(cd "$wdir/.."; pwd) done echo "${basedir}" } # concatenates all lines of a file concat_lines() { if [ -f "$1" ]; then echo "$(tr -s '\n' ' ' < "$1")" fi } export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-$(find_maven_basedir)} MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS" # Provide a "standardized" way to retrieve the CLI args that will # work with both Windows and non-Windows executions. 
MAVEN_CMD_LINE_ARGS="$MAVEN_CONFIG $@" export MAVEN_CMD_LINE_ARGS WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain exec "$JAVACMD" \ $MAVEN_OPTS \ -classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \ "-Dmaven.home=${M2_HOME}" "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \ ${WRAPPER_LAUNCHER} "$@" ================================================ FILE: spring-cloud-task-samples/task-events/mvnw.cmd ================================================ @REM ---------------------------------------------------------------------------- @REM Licensed to the Apache Software Foundation (ASF) under one @REM or more contributor license agreements. See the NOTICE file @REM distributed with this work for additional information @REM regarding copyright ownership. The ASF licenses this file @REM to you under the Apache License, Version 2.0 (the @REM "License"); you may not use this file except in compliance @REM with the License. You may obtain a copy of the License at @REM @REM https://www.apache.org/licenses/LICENSE-2.0 @REM @REM Unless required by applicable law or agreed to in writing, @REM software distributed under the License is distributed on an @REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY @REM KIND, either express or implied. See the License for the @REM specific language governing permissions and limitations @REM under the License. @REM ---------------------------------------------------------------------------- @REM ---------------------------------------------------------------------------- @REM Maven2 Start Up Batch script @REM @REM Required ENV vars: @REM JAVA_HOME - location of a JDK home dir @REM @REM Optional ENV vars @REM M2_HOME - location of maven2's installed home dir @REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands @REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a key stroke before ending @REM MAVEN_OPTS - parameters passed to the Java VM when running Maven @REM e.g. 
to debug Maven itself, use @REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 @REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files @REM ---------------------------------------------------------------------------- @REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on' @echo off @REM enable echoing my setting MAVEN_BATCH_ECHO to 'on' @if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO% @REM set %HOME% to equivalent of $HOME if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%") @REM Execute a user defined script before this one if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre @REM check for pre script, once with legacy .bat ending and once with .cmd ending if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat" if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd" :skipRcPre @setlocal set ERROR_CODE=0 @REM To isolate internal variables from possible post scripts, we use another setlocal @setlocal @REM ==== START VALIDATION ==== if not "%JAVA_HOME%" == "" goto OkJHome echo. echo Error: JAVA_HOME not found in your environment. >&2 echo Please set the JAVA_HOME variable in your environment to match the >&2 echo location of your Java installation. >&2 echo. goto error :OkJHome if exist "%JAVA_HOME%\bin\java.exe" goto init echo. echo Error: JAVA_HOME is set to an invalid directory. >&2 echo JAVA_HOME = "%JAVA_HOME%" >&2 echo Please set the JAVA_HOME variable in your environment to match the >&2 echo location of your Java installation. >&2 echo. goto error @REM ==== END VALIDATION ==== :init set MAVEN_CMD_LINE_ARGS=%* @REM Find the project base dir, i.e. the directory that contains the folder ".mvn". @REM Fallback to current working directory if not found. set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR% IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir set EXEC_DIR=%CD% set WDIR=%EXEC_DIR% :findBaseDir IF EXIST "%WDIR%"\.mvn goto baseDirFound cd .. 
IF "%WDIR%"=="%CD%" goto baseDirNotFound set WDIR=%CD% goto findBaseDir :baseDirFound set MAVEN_PROJECTBASEDIR=%WDIR% cd "%EXEC_DIR%" goto endDetectBaseDir :baseDirNotFound set MAVEN_PROJECTBASEDIR=%EXEC_DIR% cd "%EXEC_DIR%" :endDetectBaseDir IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig @setlocal EnableExtensions EnableDelayedExpansion for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a @endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS% :endReadAdditionalConfig SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe" set WRAPPER_JAR="".\.mvn\wrapper\maven-wrapper.jar"" set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain %MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CMD_LINE_ARGS% if ERRORLEVEL 1 goto error goto end :error set ERROR_CODE=1 :end @endlocal & set ERROR_CODE=%ERROR_CODE% if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost @REM check for post script, once with legacy .bat ending and once with .cmd ending if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat" if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd" :skipRcPost @REM pause the script if MAVEN_BATCH_PAUSE is set to 'on' if "%MAVEN_BATCH_PAUSE%" == "on" pause if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE% exit /B %ERROR_CODE% ================================================ FILE: spring-cloud-task-samples/task-events/pom.xml ================================================ 4.0.0 io.spring.cloud task-events 5.0.0-SNAPSHOT jar Task Events Demo of publishing task events to Spring Cloud Streams org.springframework.boot spring-boot-starter-parent 4.0.2 UTF-8 17 5.0.2-SNAPSHOT org.springframework.cloud spring-cloud-task-dependencies ${project.version} pom import org.springframework.cloud 
spring-cloud-stream-dependencies ${spring-cloud-stream.version} pom import org.springframework.cloud spring-cloud-starter-task org.springframework.cloud spring-cloud-starter-stream-rabbit compile org.springframework.boot spring-boot-starter-test test com.h2database h2 org.springframework.boot spring-boot-maven-plugin org.apache.maven.plugins maven-javadoc-plugin attach-javadocs jar org.apache.maven.plugins maven-source-plugin attach-sources jar maven-deploy-plugin true spring-snapshots Spring Snapshots https://repo.spring.io/snapshot true spring-milestones Spring Milestones https://repo.spring.io/milestone false spring-releases Spring Releases https://repo.spring.io/release false spring-snapshots Spring Snapshots https://repo.spring.io/snapshot true spring-milestones Spring Milestones https://repo.spring.io/milestone false ================================================ FILE: spring-cloud-task-samples/task-events/src/main/java/io/spring/TaskEventsApplication.java ================================================ /* * Copyright 2016-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package io.spring; import org.springframework.boot.CommandLineRunner; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.cloud.task.configuration.EnableTask; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; @EnableTask @SpringBootApplication public class TaskEventsApplication { public static void main(String[] args) { SpringApplication.run(TaskEventsApplication.class, args); } @Configuration(proxyBeanMethods = false) public static class TaskConfiguration { @Bean public CommandLineRunner commandLineRunner() { return new CommandLineRunner() { @Override public void run(String... args) throws Exception { System.out.println("The CommandLineRunner was executed"); } }; } } } ================================================ FILE: spring-cloud-task-samples/task-events/src/main/java/io/spring/package-info.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Task events sample application for Spring Cloud Task. 
*/ package io.spring; ================================================ FILE: spring-cloud-task-samples/task-events/src/main/resources/application.properties ================================================ spring.application.name=Event Emitting Task logging.level.org.springframework.cloud.stream=DEBUG logging.level.org.springframework.cloud.task=DEBUG ================================================ FILE: spring-cloud-task-samples/task-observations/.gitignore ================================================ HELP.md target/ !.mvn/wrapper/maven-wrapper.jar !**/src/main/**/target/ !**/src/test/**/target/ ### STS ### .apt_generated .classpath .factorypath .project .settings .springBeans .sts4-cache ### IntelliJ IDEA ### .idea *.iws *.iml *.ipr ### NetBeans ### /nbproject/private/ /nbbuild/ /dist/ /nbdist/ /.nb-gradle/ build/ !**/src/main/**/build/ !**/src/test/**/build/ ### VS Code ### .vscode/ ================================================ FILE: spring-cloud-task-samples/task-observations/.mvn/wrapper/maven-wrapper.properties ================================================ distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.8.4/apache-maven-3.8.4-bin.zip wrapperUrl=https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.1.0/maven-wrapper-3.1.0.jar ================================================ FILE: spring-cloud-task-samples/task-observations/README.adoc ================================================ = Timestamp Task This is a Spring Cloud Task application that utilizes Micrometer and displays metrics at the end of the application using the SimpleMeterRegistry. 
== Requirements: * Java 17 or Above == Classes: * TaskObservationsApplication - the Spring Boot Main Application == Build: [source,shell] ---- mvn clean package ---- == Run: [source,shell] ---- java -jar target/task-observations-5.0.0.jar ---- == Native Build: [source,shell] ---- mvn -Pnative clean package ---- == Native Run: [source,shell] ---- ./target/task-observations ---- ================================================ FILE: spring-cloud-task-samples/task-observations/mvnw ================================================ #!/bin/sh # ---------------------------------------------------------------------------- # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # ---------------------------------------------------------------------------- # ---------------------------------------------------------------------------- # Maven Start Up Batch script # # Required ENV vars: # ------------------ # JAVA_HOME - location of a JDK home dir # # Optional ENV vars # ----------------- # M2_HOME - location of maven2's installed home dir # MAVEN_OPTS - parameters passed to the Java VM when running Maven # e.g. 
to debug Maven itself, use # set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 # MAVEN_SKIP_RC - flag to disable loading of mavenrc files # ---------------------------------------------------------------------------- if [ -z "$MAVEN_SKIP_RC" ] ; then if [ -f /usr/local/etc/mavenrc ] ; then . /usr/local/etc/mavenrc fi if [ -f /etc/mavenrc ] ; then . /etc/mavenrc fi if [ -f "$HOME/.mavenrc" ] ; then . "$HOME/.mavenrc" fi fi # OS specific support. $var _must_ be set to either true or false. cygwin=false; darwin=false; mingw=false case "`uname`" in CYGWIN*) cygwin=true ;; MINGW*) mingw=true;; Darwin*) darwin=true # Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home # See https://developer.apple.com/library/mac/qa/qa1170/_index.html if [ -z "$JAVA_HOME" ]; then if [ -x "/usr/libexec/java_home" ]; then export JAVA_HOME="`/usr/libexec/java_home`" else export JAVA_HOME="/Library/Java/Home" fi fi ;; esac if [ -z "$JAVA_HOME" ] ; then if [ -r /etc/gentoo-release ] ; then JAVA_HOME=`java-config --jre-home` fi fi if [ -z "$M2_HOME" ] ; then ## resolve links - $0 may be a link to maven's home PRG="$0" # need this for relative symlinks while [ -h "$PRG" ] ; do ls=`ls -ld "$PRG"` link=`expr "$ls" : '.*-> \(.*\)$'` if expr "$link" : '/.*' > /dev/null; then PRG="$link" else PRG="`dirname "$PRG"`/$link" fi done saveddir=`pwd` M2_HOME=`dirname "$PRG"`/.. 
# make it fully qualified M2_HOME=`cd "$M2_HOME" && pwd` cd "$saveddir" # echo Using m2 at $M2_HOME fi # For Cygwin, ensure paths are in UNIX format before anything is touched if $cygwin ; then [ -n "$M2_HOME" ] && M2_HOME=`cygpath --unix "$M2_HOME"` [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --unix "$JAVA_HOME"` [ -n "$CLASSPATH" ] && CLASSPATH=`cygpath --path --unix "$CLASSPATH"` fi # For Mingw, ensure paths are in UNIX format before anything is touched if $mingw ; then [ -n "$M2_HOME" ] && M2_HOME="`(cd "$M2_HOME"; pwd)`" [ -n "$JAVA_HOME" ] && JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`" fi if [ -z "$JAVA_HOME" ]; then javaExecutable="`which javac`" if [ -n "$javaExecutable" ] && ! [ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then # readlink(1) is not available as standard on Solaris 10. readLink=`which readlink` if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then if $darwin ; then javaHome="`dirname \"$javaExecutable\"`" javaExecutable="`cd \"$javaHome\" && pwd -P`/javac" else javaExecutable="`readlink -f \"$javaExecutable\"`" fi javaHome="`dirname \"$javaExecutable\"`" javaHome=`expr "$javaHome" : '\(.*\)/bin'` JAVA_HOME="$javaHome" export JAVA_HOME fi fi fi if [ -z "$JAVACMD" ] ; then if [ -n "$JAVA_HOME" ] ; then if [ -x "$JAVA_HOME/jre/sh/java" ] ; then # IBM's JDK on AIX uses strange locations for the executables JAVACMD="$JAVA_HOME/jre/sh/java" else JAVACMD="$JAVA_HOME/bin/java" fi else JAVACMD="`\\unset -f command; \\command -v java`" fi fi if [ ! -x "$JAVACMD" ] ; then echo "Error: JAVA_HOME is not defined correctly." >&2 echo " We cannot execute $JAVACMD" >&2 exit 1 fi if [ -z "$JAVA_HOME" ] ; then echo "Warning: JAVA_HOME environment variable is not set." 
fi CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher # traverses directory structure from process work directory to filesystem root # first directory with .mvn subdirectory is considered project base directory find_maven_basedir() { if [ -z "$1" ] then echo "Path not specified to find_maven_basedir" return 1 fi basedir="$1" wdir="$1" while [ "$wdir" != '/' ] ; do if [ -d "$wdir"/.mvn ] ; then basedir=$wdir break fi # workaround for JBEAP-8937 (on Solaris 10/Sparc) if [ -d "${wdir}" ]; then wdir=`cd "$wdir/.."; pwd` fi # end of workaround done echo "${basedir}" } # concatenates all lines of a file concat_lines() { if [ -f "$1" ]; then echo "$(tr -s '\n' ' ' < "$1")" fi } BASE_DIR=`find_maven_basedir "$(pwd)"` if [ -z "$BASE_DIR" ]; then exit 1; fi ########################################################################################## # Extension to allow automatically downloading the maven-wrapper.jar from Maven-central # This allows using the maven wrapper in projects that prohibit checking in binary data. ########################################################################################## if [ -r "$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" ]; then if [ "$MVNW_VERBOSE" = true ]; then echo "Found .mvn/wrapper/maven-wrapper.jar" fi else if [ "$MVNW_VERBOSE" = true ]; then echo "Couldn't find .mvn/wrapper/maven-wrapper.jar, downloading it ..." 
fi if [ -n "$MVNW_REPOURL" ]; then jarUrl="$MVNW_REPOURL/org/apache/maven/wrapper/maven-wrapper/3.1.0/maven-wrapper-3.1.0.jar" else jarUrl="https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.1.0/maven-wrapper-3.1.0.jar" fi while IFS="=" read key value; do case "$key" in (wrapperUrl) jarUrl="$value"; break ;; esac done < "$BASE_DIR/.mvn/wrapper/maven-wrapper.properties" if [ "$MVNW_VERBOSE" = true ]; then echo "Downloading from: $jarUrl" fi wrapperJarPath="$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" if $cygwin; then wrapperJarPath=`cygpath --path --windows "$wrapperJarPath"` fi if command -v wget > /dev/null; then if [ "$MVNW_VERBOSE" = true ]; then echo "Found wget ... using wget" fi if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then wget "$jarUrl" -O "$wrapperJarPath" || rm -f "$wrapperJarPath" else wget --http-user=$MVNW_USERNAME --http-password=$MVNW_PASSWORD "$jarUrl" -O "$wrapperJarPath" || rm -f "$wrapperJarPath" fi elif command -v curl > /dev/null; then if [ "$MVNW_VERBOSE" = true ]; then echo "Found curl ... using curl" fi if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then curl -o "$wrapperJarPath" "$jarUrl" -f else curl --user $MVNW_USERNAME:$MVNW_PASSWORD -o "$wrapperJarPath" "$jarUrl" -f fi else if [ "$MVNW_VERBOSE" = true ]; then echo "Falling back to using Java to download" fi javaClass="$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.java" # For Cygwin, switch paths to Windows format before running javac if $cygwin; then javaClass=`cygpath --path --windows "$javaClass"` fi if [ -e "$javaClass" ]; then if [ ! -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then if [ "$MVNW_VERBOSE" = true ]; then echo " - Compiling MavenWrapperDownloader.java ..." fi # Compiling the Java class ("$JAVA_HOME/bin/javac" "$javaClass") fi if [ -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then # Running the downloader if [ "$MVNW_VERBOSE" = true ]; then echo " - Running MavenWrapperDownloader.java ..." 
fi ("$JAVA_HOME/bin/java" -cp .mvn/wrapper MavenWrapperDownloader "$MAVEN_PROJECTBASEDIR") fi fi fi fi ########################################################################################## # End of extension ########################################################################################## export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"} if [ "$MVNW_VERBOSE" = true ]; then echo $MAVEN_PROJECTBASEDIR fi MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS" # For Cygwin, switch paths to Windows format before running java if $cygwin; then [ -n "$M2_HOME" ] && M2_HOME=`cygpath --path --windows "$M2_HOME"` [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"` [ -n "$CLASSPATH" ] && CLASSPATH=`cygpath --path --windows "$CLASSPATH"` [ -n "$MAVEN_PROJECTBASEDIR" ] && MAVEN_PROJECTBASEDIR=`cygpath --path --windows "$MAVEN_PROJECTBASEDIR"` fi # Provide a "standardized" way to retrieve the CLI args that will # work with both Windows and non-Windows executions. MAVEN_CMD_LINE_ARGS="$MAVEN_CONFIG $@" export MAVEN_CMD_LINE_ARGS WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain exec "$JAVACMD" \ $MAVEN_OPTS \ $MAVEN_DEBUG_OPTS \ -classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \ "-Dmaven.home=${M2_HOME}" \ "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \ ${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@" ================================================ FILE: spring-cloud-task-samples/task-observations/mvnw.cmd ================================================ @REM ---------------------------------------------------------------------------- @REM Licensed to the Apache Software Foundation (ASF) under one @REM or more contributor license agreements. See the NOTICE file @REM distributed with this work for additional information @REM regarding copyright ownership. 
The ASF licenses this file @REM to you under the Apache License, Version 2.0 (the @REM "License"); you may not use this file except in compliance @REM with the License. You may obtain a copy of the License at @REM @REM https://www.apache.org/licenses/LICENSE-2.0 @REM @REM Unless required by applicable law or agreed to in writing, @REM software distributed under the License is distributed on an @REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY @REM KIND, either express or implied. See the License for the @REM specific language governing permissions and limitations @REM under the License. @REM ---------------------------------------------------------------------------- @REM ---------------------------------------------------------------------------- @REM Maven Start Up Batch script @REM @REM Required ENV vars: @REM JAVA_HOME - location of a JDK home dir @REM @REM Optional ENV vars @REM M2_HOME - location of maven2's installed home dir @REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands @REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a keystroke before ending @REM MAVEN_OPTS - parameters passed to the Java VM when running Maven @REM e.g. 
to debug Maven itself, use @REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 @REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files @REM ---------------------------------------------------------------------------- @REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on' @echo off @REM set title of command window title %0 @REM enable echoing by setting MAVEN_BATCH_ECHO to 'on' @if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO% @REM set %HOME% to equivalent of $HOME if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%") @REM Execute a user defined script before this one if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre @REM check for pre script, once with legacy .bat ending and once with .cmd ending if exist "%USERPROFILE%\mavenrc_pre.bat" call "%USERPROFILE%\mavenrc_pre.bat" %* if exist "%USERPROFILE%\mavenrc_pre.cmd" call "%USERPROFILE%\mavenrc_pre.cmd" %* :skipRcPre @setlocal set ERROR_CODE=0 @REM To isolate internal variables from possible post scripts, we use another setlocal @setlocal @REM ==== START VALIDATION ==== if not "%JAVA_HOME%" == "" goto OkJHome echo. echo Error: JAVA_HOME not found in your environment. >&2 echo Please set the JAVA_HOME variable in your environment to match the >&2 echo location of your Java installation. >&2 echo. goto error :OkJHome if exist "%JAVA_HOME%\bin\java.exe" goto init echo. echo Error: JAVA_HOME is set to an invalid directory. >&2 echo JAVA_HOME = "%JAVA_HOME%" >&2 echo Please set the JAVA_HOME variable in your environment to match the >&2 echo location of your Java installation. >&2 echo. goto error @REM ==== END VALIDATION ==== :init @REM Find the project base dir, i.e. the directory that contains the folder ".mvn". @REM Fallback to current working directory if not found. set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR% IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir set EXEC_DIR=%CD% set WDIR=%EXEC_DIR% :findBaseDir IF EXIST "%WDIR%"\.mvn goto baseDirFound cd .. 
IF "%WDIR%"=="%CD%" goto baseDirNotFound set WDIR=%CD% goto findBaseDir :baseDirFound set MAVEN_PROJECTBASEDIR=%WDIR% cd "%EXEC_DIR%" goto endDetectBaseDir :baseDirNotFound set MAVEN_PROJECTBASEDIR=%EXEC_DIR% cd "%EXEC_DIR%" :endDetectBaseDir IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig @setlocal EnableExtensions EnableDelayedExpansion for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a @endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS% :endReadAdditionalConfig SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe" set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar" set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain set DOWNLOAD_URL="https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.1.0/maven-wrapper-3.1.0.jar" FOR /F "usebackq tokens=1,2 delims==" %%A IN ("%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties") DO ( IF "%%A"=="wrapperUrl" SET DOWNLOAD_URL=%%B ) @REM Extension to allow automatically downloading the maven-wrapper.jar from Maven-central @REM This allows using the maven wrapper in projects that prohibit checking in binary data. if exist %WRAPPER_JAR% ( if "%MVNW_VERBOSE%" == "true" ( echo Found %WRAPPER_JAR% ) ) else ( if not "%MVNW_REPOURL%" == "" ( SET DOWNLOAD_URL="%MVNW_REPOURL%/org/apache/maven/wrapper/maven-wrapper/3.1.0/maven-wrapper-3.1.0.jar" ) if "%MVNW_VERBOSE%" == "true" ( echo Couldn't find %WRAPPER_JAR%, downloading it ... 
echo Downloading from: %DOWNLOAD_URL% ) powershell -Command "&{"^ "$webclient = new-object System.Net.WebClient;"^ "if (-not ([string]::IsNullOrEmpty('%MVNW_USERNAME%') -and [string]::IsNullOrEmpty('%MVNW_PASSWORD%'))) {"^ "$webclient.Credentials = new-object System.Net.NetworkCredential('%MVNW_USERNAME%', '%MVNW_PASSWORD%');"^ "}"^ "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12; $webclient.DownloadFile('%DOWNLOAD_URL%', '%WRAPPER_JAR%')"^ "}" if "%MVNW_VERBOSE%" == "true" ( echo Finished downloading %WRAPPER_JAR% ) ) @REM End of extension @REM Provide a "standardized" way to retrieve the CLI args that will @REM work with both Windows and non-Windows executions. set MAVEN_CMD_LINE_ARGS=%* %MAVEN_JAVA_EXE% ^ %JVM_CONFIG_MAVEN_PROPS% ^ %MAVEN_OPTS% ^ %MAVEN_DEBUG_OPTS% ^ -classpath %WRAPPER_JAR% ^ "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" ^ %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %* if ERRORLEVEL 1 goto error goto end :error set ERROR_CODE=1 :end @endlocal & set ERROR_CODE=%ERROR_CODE% if not "%MAVEN_SKIP_RC%"=="" goto skipRcPost @REM check for post script, once with legacy .bat ending and once with .cmd ending if exist "%USERPROFILE%\mavenrc_post.bat" call "%USERPROFILE%\mavenrc_post.bat" if exist "%USERPROFILE%\mavenrc_post.cmd" call "%USERPROFILE%\mavenrc_post.cmd" :skipRcPost @REM pause the script if MAVEN_BATCH_PAUSE is set to 'on' if "%MAVEN_BATCH_PAUSE%"=="on" pause if "%MAVEN_TERMINATE_CMD%"=="on" exit %ERROR_CODE% cmd /C exit /B %ERROR_CODE% ================================================ FILE: spring-cloud-task-samples/task-observations/pom.xml ================================================ 4.0.0 io.spring task-observations 5.0.0-SNAPSHOT task observation sample Displays task observations as well as commandline and application runner observations 17 org.springframework.boot spring-boot-starter-parent 4.0.2 org.springframework.cloud spring-cloud-task-dependencies ${project.version} pom import 
org.springframework.boot spring-boot-starter-test test org.springframework.cloud spring-cloud-starter-task org.springframework.boot spring-boot-configuration-processor true org.springframework.boot spring-boot-starter-jdbc org.springframework.boot spring-boot-starter-actuator io.micrometer micrometer-core io.micrometer micrometer-observation com.h2database h2 org.mariadb.jdbc mariadb-java-client spring-snapshots Spring Snapshots https://repo.spring.io/snapshot true spring-milestones Spring Milestones https://repo.spring.io/milestone false spring-releases Spring Releases https://repo.spring.io/release false spring-snapshots Spring Snapshots https://repo.spring.io/snapshot true spring-milestones Spring Milestones https://repo.spring.io/milestone false spring-releases Spring Releases https://repo.spring.io/release false org.springframework.boot spring-boot-maven-plugin org.apache.maven.plugins maven-javadoc-plugin attach-javadocs jar org.apache.maven.plugins maven-source-plugin attach-sources jar maven-deploy-plugin true ================================================ FILE: spring-cloud-task-samples/task-observations/src/main/java/io/spring/taskobservations/ObservationConfiguration.java ================================================ /* * Copyright 2022-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
 */

package io.spring.taskobservations;

import io.micrometer.core.instrument.simple.SimpleMeterRegistry;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

/**
 * Configuration that exposes a {@link SimpleMeterRegistry} bean so that metrics
 * recorded while the task runs are held in memory and can be read back by the
 * application.
 */
@Configuration
public class ObservationConfiguration {

	/**
	 * @return an in-memory {@link SimpleMeterRegistry} used to collect the task's
	 * metrics.
	 */
	@Bean
	public SimpleMeterRegistry meterRegistry() {
		return new SimpleMeterRegistry();
	}

}


================================================
FILE: spring-cloud-task-samples/task-observations/src/main/java/io/spring/taskobservations/TaskObservationsApplication.java
================================================
/*
 * Copyright 2022-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.spring.taskobservations;

import io.micrometer.core.instrument.simple.SimpleMeterRegistry;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.ApplicationRunner;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.task.configuration.EnableTask;
import org.springframework.cloud.task.listener.annotation.AfterTask;
import org.springframework.cloud.task.repository.TaskExecution;
import org.springframework.context.annotation.Bean;

/**
 * Sample task application that runs an {@link ApplicationRunner} and a
 * {@link CommandLineRunner} and, once the task completes, prints the metrics
 * collected in the {@link SimpleMeterRegistry}.
 */
@SpringBootApplication
@EnableTask
public class TaskObservationsApplication {

	private static final Log logger = LogFactory.getLog(TaskObservationsApplication.class);

	// Registry populated while the task runs; read back in afterTask(...).
	@Autowired
	public SimpleMeterRegistry simpleMeterRegistry;

	public static void main(String[] args) {
		SpringApplication.run(TaskObservationsApplication.class, args);
	}

	/**
	 * @return runner that logs a greeting when the application starts.
	 */
	@Bean
	public ApplicationRunner applicationRunner() {
		return args -> logger.info("Hello ApplicationRunner Metric's World");
	}

	/**
	 * @return runner that logs a greeting when the application starts.
	 */
	@Bean
	public CommandLineRunner commandLineRunner() {
		return args -> logger.info("Hello CommandLineRunner Metric's World");
	}

	/**
	 * Prints the metrics as recorded in the simpleMeterRegistry.
	 */
	@AfterTask
	public void afterTask(TaskExecution taskExecution) {
		System.out.println(simpleMeterRegistry.getMetersAsString());
	}

}


================================================
FILE: spring-cloud-task-samples/task-observations/src/main/java/io/spring/taskobservations/package-info.java
================================================
/*
 * Copyright 2015-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Task observations sample application for Spring Cloud Task. */ package io.spring.taskobservations; ================================================ FILE: spring-cloud-task-samples/task-observations/src/main/resources/application.properties ================================================ logging.level.org.springframework.cloud.task=debug management.metrics.tags.service=task-observations-application management.metrics.tags.application=task-observations-application-58 spring.cloud.task.name=taskmetrics spring.cloud.task.observation.enabled=true ================================================ FILE: spring-cloud-task-samples/task-observations/src/test/java/io/spring/taskobservations/TaskObservationsApplicationTests.java ================================================ /* * Copyright 2022-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
 */

package io.spring.taskobservations;

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;

import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.system.CapturedOutput;
import org.springframework.boot.test.system.OutputCaptureExtension;

import static org.assertj.core.api.AssertionsForInterfaceTypes.assertThat;

/**
 * Verifies that the task observation timer is written to the console with the
 * expected tags when the application runs.
 */
@SpringBootTest
@ExtendWith(OutputCaptureExtension.class)
class TaskObservationsApplicationTests {

	@Test
	void contextLoads(CapturedOutput output) {
		String result = output.getAll();
		// The meters are printed by the application's @AfterTask hook; the
		// assertion pins the timer name, its tags, and that it fired once.
		assertThat(result).contains("spring.cloud.task(TIMER)[application='task-observations-application-58', "
				+ "error='none', service='task-observations-application', "
				+ "spring.cloud.task.execution.id='1', spring.cloud.task.exit.code='0', "
				+ "spring.cloud.task.external.execution.id='unknown', spring.cloud.task.name='taskmetrics', "
				+ "spring.cloud.task.parent.execution.id='unknown', spring.cloud.task.status='success']; "
				+ "count=1.0, total_time=");
	}

}


================================================
FILE: spring-cloud-task-samples/timestamp/README.adoc
================================================
= Timestamp Task

This is a Spring Cloud Task application that logs a timestamp.
== Requirements: * Java 17 or Above == Classes: * TaskApplication - the Spring Boot Main Application * TimestampTask - the module that writes the log entry as Spring Cloud Task == Build: [source,shell] ---- mvn clean package ---- == Run: [source,shell] ---- java -jar target/timestamp-task-5.0.0.jar ---- == Native Build: [source,shell] ---- mvn -Pnative native:compile ---- == Native Run: [source,shell] ---- ./target/timestamp-task ---- ================================================ FILE: spring-cloud-task-samples/timestamp/pom.xml ================================================ 4.0.0 io.spring.cloud timestamp-task jar Timestamp Task 5.0.0-SNAPSHOT Spring Cloud Timestamp Task org.springframework.boot spring-boot-starter-parent 4.0.2 org.springframework.cloud.task.timestamp.TaskApplication true org.springframework.cloud spring-cloud-task-dependencies ${project.version} pom import org.springframework.boot spring-boot-starter-test test org.springframework.cloud spring-cloud-task-core org.springframework.boot spring-boot-configuration-processor true org.springframework.boot spring-boot-starter-jdbc com.h2database h2 org.mariadb.jdbc mariadb-java-client org.postgresql postgresql spring-snapshots Spring Snapshots https://repo.spring.io/snapshot true spring-milestones Spring Milestones https://repo.spring.io/milestone false spring-releases Spring Releases https://repo.spring.io/release false spring-snapshots Spring Snapshots https://repo.spring.io/snapshot true spring-milestones Spring Milestones https://repo.spring.io/milestone false spring-releases Spring Releases https://repo.spring.io/release false org.springframework.boot spring-boot-maven-plugin org.apache.maven.plugins maven-javadoc-plugin attach-javadocs jar org.apache.maven.plugins maven-source-plugin attach-sources jar maven-deploy-plugin true ================================================ FILE: spring-cloud-task-samples/timestamp/src/main/java/org/springframework/cloud/task/timestamp/TaskApplication.java 
================================================
/*
 * Copyright 2016-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.cloud.task.timestamp;

import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.cloud.task.configuration.EnableTask;
import org.springframework.context.annotation.Bean;

/**
 * Spring Boot Application that has tasks enabled.
 */
@EnableTask
@SpringBootApplication
@EnableConfigurationProperties({ TimestampTaskProperties.class })
public class TaskApplication {

	private static final Log logger = LogFactory.getLog(TaskApplication.class);

	public static void main(String[] args) {
		SpringApplication.run(TaskApplication.class, args);
	}

	/**
	 * @return the runner that logs the timestamp when the task starts.
	 */
	@Bean
	public TimestampTask timeStampTask() {
		return new TimestampTask();
	}

	/**
	 * A commandline runner that prints a timestamp.
	 */
	public static class TimestampTask implements CommandLineRunner {

		// Supplies the timestamp pattern; bound from the "format" property.
		@Autowired
		private TimestampTaskProperties config;

		@Override
		public void run(String... strings) {
			// Log the current time using the configured pattern.
			DateFormat dateFormat = new SimpleDateFormat(this.config.getFormat());
			logger.info(dateFormat.format(new Date()));
		}

	}

}


================================================
FILE: spring-cloud-task-samples/timestamp/src/main/java/org/springframework/cloud/task/timestamp/TimestampTaskProperties.java
================================================
/*
 * Copyright 2015-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.cloud.task.timestamp;

import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.util.Assert;

/**
 * Properties for the timestamp task sample.
 *
 * @author Glenn Renfro
 */
@ConfigurationProperties
public class TimestampTaskProperties {

	/**
	 * The timestamp format, "yyyy-MM-dd HH:mm:ss.SSS" by default.
	 */
	private String format = "yyyy-MM-dd HH:mm:ss.SSS";

	/**
	 * @return the configured format; never blank.
	 * @throws IllegalArgumentException if the bound format is empty or null
	 */
	public String getFormat() {
		Assert.hasText(this.format, "format must not be empty nor null");
		return this.format;
	}

	public void setFormat(String format) {
		this.format = format;
	}

}


================================================
FILE: spring-cloud-task-samples/timestamp/src/main/java/org/springframework/cloud/task/timestamp/package-info.java
================================================
/*
 * Copyright 2015-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Sample timestamp task application for Spring Cloud Task. */ package org.springframework.cloud.task.timestamp; ================================================ FILE: spring-cloud-task-samples/timestamp/src/main/resources/application.properties ================================================ spring.application.name=Demo Timestamp Task logging.level.org.springframework.cloud.task=DEBUG #spring.aop.proxy-target-class=false ================================================ FILE: spring-cloud-task-samples/timestamp/src/test/java/org/springframework/cloud/task/timestamp/TaskApplicationTests.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
 */

package org.springframework.cloud.task.timestamp;

import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.test.system.CapturedOutput;
import org.springframework.boot.test.system.OutputCaptureExtension;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Verifies that the Task Application outputs the correct task log entries.
 *
 * @author Glenn Renfro
 */
@ExtendWith(OutputCaptureExtension.class)
public class TaskApplicationTests {

	@Test
	public void testTimeStampApp(CapturedOutput capturedOutput) throws Exception {
		final String TEST_DATE_DOTS = ".......";
		final String CREATE_TASK_MESSAGE = "Creating: TaskExecution{executionId=";
		final String UPDATE_TASK_MESSAGE = "Updating: TaskExecution with executionId=";
		final String EXIT_CODE_MESSAGE = "with the following {exitCode=0";

		// Run the app with a format whose literal dots must show up in the log.
		String[] args = { "--format=yyyy" + TEST_DATE_DOTS };
		SpringApplication.run(TaskApplication.class, args);
		String output = capturedOutput.toString();

		assertThat(output.contains(TEST_DATE_DOTS)).as("Unable to find the timestamp: " + output).isTrue();
		assertThat(output.contains(CREATE_TASK_MESSAGE)).as("Test results do not show create task message: " + output)
			.isTrue();
		assertThat(output.contains(UPDATE_TASK_MESSAGE)).as("Test results do not show success message: " + output)
			.isTrue();
		assertThat(output.contains(EXIT_CODE_MESSAGE)).as("Test results have incorrect exit code: " + output).isTrue();

		// The task name must appear exactly once in the captured output.
		String taskTitle = " taskName='Demo Timestamp Task'";
		Pattern pattern = Pattern.compile(taskTitle);
		Matcher matcher = pattern.matcher(output);
		int count = 0;
		while (matcher.find()) {
			count++;
		}
		assertThat(count).as("The number of task titles did not match expected: ").isEqualTo(1);
	}

}


================================================
FILE: spring-cloud-task-samples/timestamp/src/test/java/org/springframework/cloud/task/timestamp/TimestampTaskPropertiesTests.java
================================================
/*
 * Copyright 2015-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.cloud.task.timestamp;

import org.junit.jupiter.api.Test;

import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.test.util.TestPropertyValues;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.Configuration;

import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;

/**
 * Tests for {@link TimestampTaskProperties} binding and validation.
 *
 * @author Glenn Renfro
 */
public class TimestampTaskPropertiesTests {

	@Test
	public void testEmptyFormat() {
		AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
		TestPropertyValues testPropertyValues = TestPropertyValues.of("format:");
		testPropertyValues.applyTo(context);
		context.register(Conf.class);
		context.refresh();
		TimestampTaskProperties properties = context.getBean(TimestampTaskProperties.class);
		// getFormat() rejects a blank format with an IllegalArgumentException.
		assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> {
			properties.getFormat();
		});
	}

	@Test
	public void testFormatDefault() {
		AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
		context.register(Conf.class);
		context.refresh();
		TimestampTaskProperties properties = context.getBean(TimestampTaskProperties.class);
		assertThat(properties.getFormat()).as("result does not match default format.")
			.isEqualTo("yyyy-MM-dd HH:mm:ss.SSS");
	}

	@Test
	public void testFormatSet() {
		final String FORMAT = "yyyy";
		AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
		context.register(Conf.class);
		context.refresh();
		TimestampTaskProperties properties = context.getBean(TimestampTaskProperties.class);
		properties.setFormat(FORMAT);
		assertThat(properties.getFormat()).as("result does not match established format.").isEqualTo(FORMAT);
	}

	@Configuration(proxyBeanMethods = false)
	@EnableConfigurationProperties(TimestampTaskProperties.class)
	static class Conf {

	}

}


================================================
FILE: spring-cloud-task-samples/timestamp/src/test/resources/application.properties
================================================
#
# Copyright 2016-2019 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# logging.level.org.springframework.cloud.task=DEBUG spring.application.name=Demo Timestamp Task ================================================ FILE: spring-cloud-task-stream/.mvn/wrapper/maven-wrapper.properties ================================================ distributionUrl=https://repo1.maven.org/maven2/org/apache/maven/apache-maven/3.6.0/apache-maven-3.6.0-bin.zip ================================================ FILE: spring-cloud-task-stream/pom.xml ================================================ 4.0.0 spring-cloud-task-stream jar Spring Cloud Task Stream Allows a Task to be a part of a stream org.springframework.cloud spring-cloud-task-parent 5.0.2-SNAPSHOT org.springframework.batch spring-batch-core true org.springframework spring-core org.springframework.cloud spring-cloud-stream true org.springframework.cloud spring-cloud-task-core tools.jackson.core jackson-databind test true org.springframework.boot spring-boot-starter-test test org.springframework.boot spring-boot-starter test org.springframework.cloud spring-cloud-starter-stream-rabbit test org.springframework.cloud spring-cloud-task-batch test com.h2database h2 test org.springframework.boot spring-boot-configuration-processor true org.springframework.boot spring-boot-autoconfigure-processor true org.springframework.cloud spring-cloud-stream-test-binder ${spring-cloud-stream.version} test org.assertj assertj-core test ================================================ FILE: spring-cloud-task-stream/src/main/java/org/springframework/cloud/task/batch/listener/BatchEventAutoConfiguration.java ================================================ /* * Copyright 2016-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.batch.listener; import org.springframework.batch.core.job.Job; import org.springframework.batch.core.listener.ItemProcessListener; import org.springframework.batch.core.listener.ItemReadListener; import org.springframework.batch.core.listener.ItemWriteListener; import org.springframework.batch.core.listener.JobExecutionListener; import org.springframework.batch.core.listener.SkipListener; import org.springframework.batch.core.listener.StepExecutionListener; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.autoconfigure.AutoConfiguration; import org.springframework.boot.autoconfigure.condition.ConditionalOnBean; import org.springframework.boot.autoconfigure.condition.ConditionalOnClass; import org.springframework.boot.autoconfigure.condition.ConditionalOnExpression; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.cloud.stream.function.StreamBridge; import org.springframework.cloud.task.batch.listener.support.MessagePublisher; import org.springframework.cloud.task.batch.listener.support.TaskBatchEventListenerBeanPostProcessor; import org.springframework.cloud.task.batch.listener.support.TaskEventProperties; import org.springframework.cloud.task.configuration.SimpleTaskAutoConfiguration; import org.springframework.cloud.task.listener.TaskLifecycleListener; import 
org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Lazy; /** * Autoconfigures Spring Batch listeners designed to emit events on the following * channels. * *
 * <ul>
 * <li>{@link EventEmittingJobExecutionListener} - job-execution-events</li>
 * <li>{@link EventEmittingStepExecutionListener} - step-execution-events</li>
 * <li>{@link org.springframework.batch.core.listener.ChunkListener} - chunk-events</li>
 * <li>{@link EventEmittingItemReadListener} - item-read-events</li>
 * <li>{@link EventEmittingItemProcessListener} - item-process-events</li>
 * <li>{@link EventEmittingItemWriteListener} - item-write-events</li>
 * <li>{@link EventEmittingSkipListener} - skip-events</li>
 * </ul>
 *
 * @author Michael Minella
 * @author Glenn Renfro
 * @author Ali Shahbour
 */
// @checkstyle:off
@AutoConfiguration(after = SimpleTaskAutoConfiguration.class)
@ConditionalOnClass(Job.class)
@ConditionalOnBean({ Job.class, TaskLifecycleListener.class })
@ConditionalOnProperty(prefix = "spring.cloud.task.batch.events", name = "enabled", havingValue = "true", matchIfMissing = true)
// @checkstyle:on
public class BatchEventAutoConfiguration {

	/**
	 * Name of the job execution events listener bean.
	 */
	public static final String JOB_EXECUTION_EVENTS_LISTENER = "jobExecutionEventsListener";

	/**
	 * Name of the chunk events listener bean.
	 */
	public static final String CHUNK_EVENTS_LISTENER = "chunkEventsListener";

	/**
	 * Name of the step execution events listener bean.
	 */
	public static final String STEP_EXECUTION_EVENTS_LISTENER = "stepExecutionEventsListener";

	/**
	 * Name of the item read events listener bean.
	 */
	public static final String ITEM_READ_EVENTS_LISTENER = "itemReadEventsListener";

	/**
	 * Name of the item write events listener bean.
	 */
	public static final String ITEM_WRITE_EVENTS_LISTENER = "itemWriteEventsListener";

	/**
	 * Name of the item process events listener bean.
	 */
	public static final String ITEM_PROCESS_EVENTS_LISTENER = "itemProcessEventsListener";

	/**
	 * Name of the skip events listener bean.
	 */
	public static final String SKIP_EVENTS_LISTENER = "skipEventsListener";

	/**
	 * @return post processor that registers the event listeners; see
	 * {@link TaskBatchEventListenerBeanPostProcessor} for the wiring details.
	 */
	@Bean
	@ConditionalOnMissingBean
	public static TaskBatchEventListenerBeanPostProcessor batchTaskEventListenerBeanPostProcessor() {
		return new TaskBatchEventListenerBeanPostProcessor();
	}

	/**
	 * Configuration for Job Execution Listener.
*/ @AutoConfiguration @ConditionalOnClass(StreamBridge.class) @EnableConfigurationProperties(TaskEventProperties.class) @ConditionalOnMissingBean(name = JOB_EXECUTION_EVENTS_LISTENER) @ConditionalOnExpression("T(org.springframework.util.StringUtils).isEmpty('${spring.batch.job.jobName:}')") public static class JobExecutionListenerConfiguration { @Autowired private TaskEventProperties taskEventProperties; // @checkstyle:off @Bean @Lazy @ConditionalOnProperty(prefix = "spring.cloud.task.batch.events.job-execution", name = "enabled", havingValue = "true", matchIfMissing = true) // @checkstyle:on public JobExecutionListener jobExecutionEventsListener(MessagePublisher messagePublisher, TaskEventProperties properties) { return new EventEmittingJobExecutionListener(messagePublisher, this.taskEventProperties.getJobExecutionOrder(), properties); } // @checkstyle:off @Bean @ConditionalOnProperty(prefix = "spring.cloud.task.batch.events.step-execution", name = "enabled", havingValue = "true", matchIfMissing = true) // @checkstyle:on public StepExecutionListener stepExecutionEventsListener(MessagePublisher messagePublisher, TaskEventProperties properties) { return new EventEmittingStepExecutionListener(messagePublisher, this.taskEventProperties.getStepExecutionOrder(), properties); } // @checkstyle:off @Bean @Lazy @ConditionalOnProperty(prefix = "spring.cloud.task.batch.events.chunk", name = "enabled", havingValue = "true", matchIfMissing = true) // @checkstyle:on public EventEmittingChunkListener chunkEventsListener(MessagePublisher messagePublisher, TaskEventProperties properties) { return new EventEmittingChunkListener(messagePublisher, this.taskEventProperties.getChunkOrder(), properties); } // @checkstyle:off @Bean @ConditionalOnProperty(prefix = "spring.cloud.task.batch.events.item-read", name = "enabled", havingValue = "true", matchIfMissing = true) // @checkstyle:on public ItemReadListener itemReadEventsListener(MessagePublisher messagePublisher, TaskEventProperties 
properties) { return new EventEmittingItemReadListener(messagePublisher, this.taskEventProperties.getItemReadOrder(), properties); } // @checkstyle:off @Bean @ConditionalOnProperty(prefix = "spring.cloud.task.batch.events.item-write", name = "enabled", havingValue = "true", matchIfMissing = true) // @checkstyle:on public ItemWriteListener itemWriteEventsListener(MessagePublisher messagePublisher, TaskEventProperties properties) { return new EventEmittingItemWriteListener(messagePublisher, this.taskEventProperties.getItemWriteOrder(), properties); } // @checkstyle:off @Bean @ConditionalOnProperty(prefix = "spring.cloud.task.batch.events.item-process", name = "enabled", havingValue = "true", matchIfMissing = true) // @checkstyle:on public ItemProcessListener itemProcessEventsListener(MessagePublisher messagePublisher, TaskEventProperties properties) { return new EventEmittingItemProcessListener(messagePublisher, this.taskEventProperties.getItemProcessOrder(), properties); } // @checkstyle:off @Bean @ConditionalOnProperty(prefix = "spring.cloud.task.batch.events.skip", name = "enabled", havingValue = "true", matchIfMissing = true) // @checkstyle:on public SkipListener skipEventsListener(MessagePublisher messagePublisher, TaskEventProperties properties) { return new EventEmittingSkipListener(messagePublisher, this.taskEventProperties.getItemProcessOrder(), properties); } @Bean public MessagePublisher messagePublisher(StreamBridge streamBridge) { return new MessagePublisher(streamBridge); } } } ================================================ FILE: spring-cloud-task-stream/src/main/java/org/springframework/cloud/task/batch/listener/EventEmittingChunkListener.java ================================================ /* * Copyright 2017-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.cloud.task.batch.listener;

import org.springframework.batch.core.listener.ChunkListener;
import org.springframework.batch.infrastructure.item.Chunk;
import org.springframework.cloud.task.batch.listener.support.MessagePublisher;
import org.springframework.cloud.task.batch.listener.support.TaskEventProperties;
import org.springframework.core.Ordered;
import org.springframework.util.Assert;

/**
 * Provides informational messages around the {@link Chunk} of a batch job.
 *
 * The {@link ChunkListener#beforeChunk(Chunk)} and
 * {@link ChunkListener#afterChunk(Chunk)} callbacks each publish an informational
 * message, while {@link ChunkListener#onChunkError(Exception,Chunk)} is a no-op in
 * this implementation.
 *
 * @author Ali Shahbour
 */
public class EventEmittingChunkListener implements ChunkListener, Ordered {

	// Position of this listener relative to other registered listeners; defaults to last.
	private int order = Ordered.LOWEST_PRECEDENCE;

	// Destination-aware publisher for the chunk event binding.
	private MessagePublisher messagePublisher;

	// Provides the configurable chunk event binding name.
	private TaskEventProperties properties;

	public EventEmittingChunkListener(MessagePublisher messagePublisher, TaskEventProperties properties) {
		Assert.notNull(messagePublisher, "messagePublisher is required");
		Assert.notNull(properties, "properties is required");
		this.messagePublisher = messagePublisher;
		this.properties = properties;
	}

	public EventEmittingChunkListener(MessagePublisher messagePublisher, int order, TaskEventProperties properties) {
		this(messagePublisher, properties);
		this.order = order;
	}

	@Override
	public void beforeChunk(Chunk chunk) {
		this.messagePublisher.publish(this.properties.getChunkEventBindingName(), "Before Chunk Processing");
	}

	@Override
	public void afterChunk(Chunk chunk) {
		this.messagePublisher.publish(this.properties.getChunkEventBindingName(), "After Chunk Processing");
	}

	@Override
	public void onChunkError(Exception exception, Chunk chunk) {
		// Intentionally a no-op: chunk errors are not published as events.
	}

	@Override
	public int getOrder() {
		return this.order;
	}

}
================================================
FILE: spring-cloud-task-stream/src/main/java/org/springframework/cloud/task/batch/listener/EventEmittingItemProcessListener.java
================================================
/*
 * Copyright 2016-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*/

package org.springframework.cloud.task.batch.listener;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import org.springframework.batch.core.listener.ItemProcessListener;
import org.springframework.batch.infrastructure.item.ItemProcessor;
import org.springframework.cloud.task.batch.listener.support.BatchJobHeaders;
import org.springframework.cloud.task.batch.listener.support.MessagePublisher;
import org.springframework.cloud.task.batch.listener.support.TaskEventProperties;
import org.springframework.core.Ordered;
import org.springframework.util.Assert;

/**
 * Provides informational messages around the {@link ItemProcessListener} of a batch job.
 *
 * The {@link ItemProcessListener#beforeProcess(Object)} of this listener is a no-op.
 * {@link ItemProcessListener#afterProcess(Object, Object)} emits a message stating
 * whether the item was filtered ({@link ItemProcessor} returned null), whether the
 * processor result equaled the input (via .equals), or whether they differed.
 * {@link ItemProcessListener#onProcessError(Object, Exception)} provides the exception
 * via the {@link BatchJobHeaders#BATCH_EXCEPTION} message header.
 *
 * @author Michael Minella
 * @author Glenn Renfro
 * @author Ali Shahbour
 */
public class EventEmittingItemProcessListener implements ItemProcessListener, Ordered {

	private static final Log logger = LogFactory.getLog(EventEmittingItemProcessListener.class);

	private MessagePublisher messagePublisher;

	private int order = Ordered.LOWEST_PRECEDENCE;

	private TaskEventProperties properties;

	public EventEmittingItemProcessListener(MessagePublisher messagePublisher, TaskEventProperties properties) {
		Assert.notNull(messagePublisher, "messagePublisher is required");
		Assert.notNull(properties, "properties is required");
		this.messagePublisher = messagePublisher;
		this.properties = properties;
	}

	public EventEmittingItemProcessListener(MessagePublisher messagePublisher, int order,
			TaskEventProperties properties) {
		this(messagePublisher, properties);
		this.order = order;
	}

	@Override
	public void beforeProcess(Object item) {
		// Intentionally a no-op.
	}

	@Override
	public void afterProcess(Object item, Object result) {
		// Determine the outcome first, then publish a single message for it.
		final String message;
		if (result == null) {
			message = "1 item was filtered";
		}
		else if (item.equals(result)) {
			message = "item equaled result after processing";
		}
		else {
			message = "item did not equal result after processing";
		}
		this.messagePublisher.publish(this.properties.getItemProcessEventBindingName(), message);
	}

	@Override
	public void onProcessError(Object item, Exception e) {
		if (logger.isDebugEnabled()) {
			logger.debug("Executing onProcessError: " + e.getMessage(), e);
		}
		String bindingName = this.properties.getItemProcessEventBindingName();
		this.messagePublisher.publishWithThrowableHeader(bindingName, "Exception while item was being processed",
				e.getMessage());
	}

	@Override
	public int getOrder() {
		return this.order;
	}

}
================================================
FILE: spring-cloud-task-stream/src/main/java/org/springframework/cloud/task/batch/listener/EventEmittingItemReadListener.java
================================================ /* * Copyright 2016-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.batch.listener; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.springframework.batch.core.listener.ItemReadListener; import org.springframework.batch.infrastructure.item.ItemReader; import org.springframework.cloud.task.batch.listener.support.BatchJobHeaders; import org.springframework.cloud.task.batch.listener.support.MessagePublisher; import org.springframework.cloud.task.batch.listener.support.TaskEventProperties; import org.springframework.core.Ordered; import org.springframework.util.Assert; /** * Provides informational messages around the {@link ItemReader} of a batch job. * * The {@link ItemReadListener#beforeRead()} and * {@link ItemReadListener#afterRead(Object)} are both no-ops in this implementation. * {@link ItemReadListener#onReadError(Exception)} provides the exception via the * {@link BatchJobHeaders#BATCH_EXCEPTION} message header. 
* * @author Glenn Renfro * @author Ali Shahbour */ public class EventEmittingItemReadListener implements ItemReadListener, Ordered { private static final Log logger = LogFactory.getLog(EventEmittingItemReadListener.class); private int order = Ordered.LOWEST_PRECEDENCE; private final MessagePublisher messagePublisher; private TaskEventProperties properties; public EventEmittingItemReadListener(MessagePublisher messagePublisher, TaskEventProperties properties) { Assert.notNull(messagePublisher, "messagePublisher is required"); Assert.notNull(properties, "properties is required"); this.properties = properties; this.messagePublisher = messagePublisher; } public EventEmittingItemReadListener(MessagePublisher messagePublisher, int order, TaskEventProperties properties) { this(messagePublisher, properties); this.order = order; } @Override public void beforeRead() { } @Override public void afterRead(Object item) { } @Override public void onReadError(Exception ex) { if (logger.isDebugEnabled()) { logger.debug("Executing onReadError: " + ex.getMessage(), ex); } this.messagePublisher.publishWithThrowableHeader(this.properties.getItemReadEventBindingName(), "Exception while item was being read", ex.getMessage()); } @Override public int getOrder() { return this.order; } } ================================================ FILE: spring-cloud-task-stream/src/main/java/org/springframework/cloud/task/batch/listener/EventEmittingItemWriteListener.java ================================================ /* * Copyright 2016-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.batch.listener; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.springframework.batch.core.listener.ItemWriteListener; import org.springframework.batch.infrastructure.item.Chunk; import org.springframework.cloud.task.batch.listener.support.BatchJobHeaders; import org.springframework.cloud.task.batch.listener.support.MessagePublisher; import org.springframework.cloud.task.batch.listener.support.TaskEventProperties; import org.springframework.core.Ordered; import org.springframework.util.Assert; /** * Setups up the ItemWriteEventsListener to emit events to the spring cloud stream output * channel. * * Each method provides an informational message. * {@link ItemWriteListener#onWriteError(Exception, Chunk)} provides a message as well as * the exception's message via the {@link BatchJobHeaders#BATCH_EXCEPTION} message header. 
* * @author Glenn Renfro * @author Ali Shahbour */ public class EventEmittingItemWriteListener implements ItemWriteListener, Ordered { private static final Log logger = LogFactory.getLog(EventEmittingItemWriteListener.class); private int order = Ordered.LOWEST_PRECEDENCE; private final MessagePublisher messagePublisher; private TaskEventProperties properties; public EventEmittingItemWriteListener(MessagePublisher messagePublisher, TaskEventProperties properties) { Assert.notNull(messagePublisher, "messagePublisher is required"); Assert.notNull(properties, "properties is required"); this.messagePublisher = messagePublisher; this.properties = properties; } public EventEmittingItemWriteListener(MessagePublisher messagePublisher, int order, TaskEventProperties properties) { this(messagePublisher, properties); this.order = order; } @Override public void beforeWrite(Chunk items) { this.messagePublisher.publish(this.properties.getItemWriteEventBindingName(), items.size() + " items to be written."); } @Override public void afterWrite(Chunk items) { if (logger.isDebugEnabled()) { logger.debug("Executing afterWrite: " + items); } this.messagePublisher.publish(this.properties.getItemWriteEventBindingName(), items.size() + " items have been written."); } @Override public void onWriteError(Exception exception, Chunk items) { if (logger.isDebugEnabled()) { logger.debug("Executing onWriteError: " + exception.getMessage(), exception); } String payload = "Exception while " + items.size() + " items are attempted to be written."; this.messagePublisher.publishWithThrowableHeader(this.properties.getItemWriteEventBindingName(), payload, exception.getMessage()); } @Override public int getOrder() { return this.order; } } ================================================ FILE: spring-cloud-task-stream/src/main/java/org/springframework/cloud/task/batch/listener/EventEmittingJobExecutionListener.java ================================================ /* * Copyright 2016-present the original 
author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.batch.listener; import org.springframework.batch.core.job.JobExecution; import org.springframework.batch.core.listener.JobExecutionListener; import org.springframework.cloud.task.batch.listener.support.JobExecutionEvent; import org.springframework.cloud.task.batch.listener.support.MessagePublisher; import org.springframework.cloud.task.batch.listener.support.TaskEventProperties; import org.springframework.core.Ordered; import org.springframework.util.Assert; /** * Provides {@link JobExecutionEvent} at both the start and end of the job's execution. 
* * @author Michael Minella * @author Glenn Renfro * @author Ali Shahbour */ public class EventEmittingJobExecutionListener implements JobExecutionListener, Ordered { private int order = Ordered.LOWEST_PRECEDENCE; private final MessagePublisher messagePublisher; private TaskEventProperties properties; public EventEmittingJobExecutionListener(MessagePublisher messagePublisher, TaskEventProperties properties) { Assert.notNull(messagePublisher, "messagePublisher is required"); Assert.notNull(properties, "properties is required"); this.messagePublisher = messagePublisher; this.properties = properties; } public EventEmittingJobExecutionListener(MessagePublisher messagePublisher, int order, TaskEventProperties properties) { this(messagePublisher, properties); this.order = order; } @Override public void beforeJob(JobExecution jobExecution) { this.messagePublisher.publish(properties.getJobExecutionEventBindingName(), new JobExecutionEvent(jobExecution)); } @Override public void afterJob(JobExecution jobExecution) { this.messagePublisher.publish(properties.getJobExecutionEventBindingName(), new JobExecutionEvent(jobExecution)); } @Override public int getOrder() { return this.order; } } ================================================ FILE: spring-cloud-task-stream/src/main/java/org/springframework/cloud/task/batch/listener/EventEmittingSkipListener.java ================================================ /* * Copyright 2016-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.batch.listener; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.springframework.batch.core.listener.SkipListener; import org.springframework.cloud.task.batch.listener.support.BatchJobHeaders; import org.springframework.cloud.task.batch.listener.support.MessagePublisher; import org.springframework.cloud.task.batch.listener.support.TaskEventProperties; import org.springframework.core.Ordered; import org.springframework.util.Assert; /** * Setups up the SkipProcessListener to emit events to the spring cloud stream output * channel. * * This listener emits the exception's message via the * {@link BatchJobHeaders#BATCH_EXCEPTION} message header for each method. For * {@link SkipListener#onSkipInProcess(Object, Throwable)} and * {@link SkipListener#onSkipInWrite(Object, Throwable)} the body of the message consists * of the item that caused the error. 
* * @author Glenn Renfro * @author Ali Shahbour */ public class EventEmittingSkipListener implements SkipListener, Ordered { private static final Log logger = LogFactory.getLog(EventEmittingSkipListener.class); private final MessagePublisher messagePublisher; private int order = Ordered.LOWEST_PRECEDENCE; private TaskEventProperties properties; public EventEmittingSkipListener(MessagePublisher messagePublisher, TaskEventProperties properties) { Assert.notNull(messagePublisher, "messagePublisher is required"); Assert.notNull(properties, "properties is required"); this.messagePublisher = messagePublisher; this.properties = properties; } public EventEmittingSkipListener(MessagePublisher messagePublisher, int order, TaskEventProperties properties) { this(messagePublisher, properties); this.order = order; } @Override public void onSkipInRead(Throwable t) { if (logger.isDebugEnabled()) { logger.debug("Executing onSkipInRead: " + t.getMessage(), t); } this.messagePublisher.publishWithThrowableHeader(this.properties.getSkipEventBindingName(), "Skipped when reading.", t.getMessage()); } @Override public void onSkipInWrite(Object item, Throwable t) { if (logger.isDebugEnabled()) { logger.debug("Executing onSkipInWrite: " + t.getMessage(), t); } this.messagePublisher.publishWithThrowableHeader(this.properties.getSkipEventBindingName(), item, t.getMessage()); } @Override public void onSkipInProcess(Object item, Throwable t) { if (logger.isDebugEnabled()) { logger.debug("Executing onSkipInProcess: " + t.getMessage(), t); } this.messagePublisher.publishWithThrowableHeader(this.properties.getSkipEventBindingName(), item, t.getMessage()); } @Override public int getOrder() { return this.order; } } ================================================ FILE: spring-cloud-task-stream/src/main/java/org/springframework/cloud/task/batch/listener/EventEmittingStepExecutionListener.java ================================================ /* * Copyright 2016-present the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.batch.listener; import org.springframework.batch.core.ExitStatus; import org.springframework.batch.core.listener.StepExecutionListener; import org.springframework.batch.core.step.StepExecution; import org.springframework.cloud.task.batch.listener.support.MessagePublisher; import org.springframework.cloud.task.batch.listener.support.StepExecutionEvent; import org.springframework.cloud.task.batch.listener.support.TaskEventProperties; import org.springframework.core.Ordered; import org.springframework.util.Assert; /** * Provides a {@link StepExecutionEvent} at the start and end of each step indicating the * step's status. The {@link StepExecutionListener#afterStep(StepExecution)} returns the * {@link ExitStatus} of the inputted {@link StepExecution}. 
* * @author Michael Minella * @author Glenn Renfro * @author Ali Shahbour */ public class EventEmittingStepExecutionListener implements StepExecutionListener, Ordered { private final MessagePublisher messagePublisher; private int order = Ordered.LOWEST_PRECEDENCE; private TaskEventProperties properties; public EventEmittingStepExecutionListener(MessagePublisher messagePublisher, TaskEventProperties properties) { Assert.notNull(messagePublisher, "messagePublisher is required"); Assert.notNull(properties, "properties is required"); this.messagePublisher = messagePublisher; this.properties = properties; } public EventEmittingStepExecutionListener(MessagePublisher messagePublisher, int order, TaskEventProperties properties) { this(messagePublisher, properties); this.order = order; } @Override public void beforeStep(StepExecution stepExecution) { this.messagePublisher.publish(this.properties.getStepExecutionEventBindingName(), new StepExecutionEvent(stepExecution)); } @Override public ExitStatus afterStep(StepExecution stepExecution) { this.messagePublisher.publish(this.properties.getStepExecutionEventBindingName(), new StepExecutionEvent(stepExecution)); return stepExecution.getExitStatus(); } @Override public int getOrder() { return this.order; } } ================================================ FILE: spring-cloud-task-stream/src/main/java/org/springframework/cloud/task/batch/listener/package-info.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * Stream-based batch listener components for Spring Cloud Task.
 */
package org.springframework.cloud.task.batch.listener;
================================================
FILE: spring-cloud-task-stream/src/main/java/org/springframework/cloud/task/batch/listener/support/BatchJobHeaders.java
================================================
/*
 * Copyright 2016-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.cloud.task.batch.listener.support;

/**
 * Headers definitions used by the batch job plugin.
 *
 * @author Gunnar Hillert
 * @since 1.0
 */
public final class BatchJobHeaders {

	/**
	 * Name of the batch listener event type.
	 */
	public static final String BATCH_LISTENER_EVENT_TYPE = "batch_listener_event_type";

	/**
	 * Key of the batch exception message.
	 */
	public static final String BATCH_EXCEPTION = "batch_exception";

	// Constants-only holder; not meant to be instantiated.
	private BatchJobHeaders() {
	}

}
================================================
FILE: spring-cloud-task-stream/src/main/java/org/springframework/cloud/task/batch/listener/support/ExitStatus.java
================================================
/*
 * Copyright 2015-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.cloud.task.batch.listener.support;

import org.springframework.util.Assert;

/**
 * ExitStatus DTO created so that {@link org.springframework.batch.core.ExitStatus} can be
 * serialized into Json without having to add mixins to an ObjectMapper.
 *
 * @author Glenn Renfro
 */
public class ExitStatus {

	private String exitCode;

	private String exitDescription;

	/** Default constructor required for Json deserialization. */
	public ExitStatus() {
	}

	/**
	 * Copies the exit code and description from the specified batch ExitStatus.
	 * @param exitStatus the batch ExitStatus to build this DTO around; must not be null.
	 */
	public ExitStatus(org.springframework.batch.core.ExitStatus exitStatus) {
		Assert.notNull(exitStatus, "exitStatus must not be null.");
		this.exitCode = exitStatus.getExitCode();
		this.exitDescription = exitStatus.getExitDescription();
	}

	public String getExitCode() {
		return this.exitCode;
	}

	public void setExitCode(String exitCode) {
		this.exitCode = exitCode;
	}

	public String getExitDescription() {
		return this.exitDescription;
	}

	public void setExitDescription(String exitDescription) {
		this.exitDescription = exitDescription;
	}

}
================================================
FILE: spring-cloud-task-stream/src/main/java/org/springframework/cloud/task/batch/listener/support/JobExecutionEvent.java
================================================
/*
 * Copyright 2016-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.batch.listener.support; import java.time.LocalDateTime; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.concurrent.CopyOnWriteArrayList; import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.Entity; import org.springframework.batch.core.job.JobExecution; import org.springframework.batch.core.step.StepExecution; import org.springframework.batch.infrastructure.item.ExecutionContext; /** * This is a JobEvent DTO created so that a {@link JobExecution} can be serialized into * Json without having to add mixins to an ObjectMapper. * * @author Glenn Renfro */ public class JobExecutionEvent extends Entity { private JobParametersEvent jobParameters; private JobInstanceEvent jobInstance; private Collection stepExecutions = Collections.synchronizedList(new ArrayList<>()); private BatchStatus status = BatchStatus.STARTING; private LocalDateTime startTime = null; private LocalDateTime createTime = LocalDateTime.now(); private LocalDateTime endTime = null; private LocalDateTime lastUpdated = null; private ExitStatus exitStatus = new ExitStatus(new org.springframework.batch.core.ExitStatus("UNKNOWN")); private ExecutionContext executionContext = new ExecutionContext(); private List failureExceptions = new CopyOnWriteArrayList<>(); public JobExecutionEvent() { super(0); } /** * Constructor for the StepExecution to initialize the DTO. 
* @param original the StepExecution to build this DTO around. */ public JobExecutionEvent(JobExecution original) { super(original.getId()); this.jobParameters = new JobParametersEvent(original.getJobParameters().parameters()); this.jobInstance = new JobInstanceEvent(original.getJobInstance().getId(), original.getJobInstance().getJobName()); for (StepExecution stepExecution : original.getStepExecutions()) { this.stepExecutions.add(new StepExecutionEvent(stepExecution)); } this.status = original.getStatus(); this.startTime = original.getStartTime(); this.createTime = original.getCreateTime(); this.endTime = original.getEndTime(); this.lastUpdated = original.getLastUpdated(); this.exitStatus = new ExitStatus(original.getExitStatus()); this.executionContext = original.getExecutionContext(); this.failureExceptions = original.getFailureExceptions(); this.setVersion(original.getVersion()); } public JobParametersEvent getJobParameters() { return this.jobParameters; } public LocalDateTime getEndTime() { return this.endTime; } public void setEndTime(LocalDateTime endTime) { this.endTime = endTime; } public LocalDateTime getStartTime() { return this.startTime; } public void setStartTime(LocalDateTime startTime) { this.startTime = startTime; } public BatchStatus getStatus() { return this.status; } /** * Set the value of the status field. * @param status the status to set */ public void setStatus(BatchStatus status) { this.status = status; } /** * Upgrade the status field if the provided value is greater than the existing one. * Clients using this method to set the status can be sure that they don't overwrite a * failed status with an successful one. * @param status the new status value */ public void upgradeStatus(BatchStatus status) { this.status = this.status.upgradeTo(status); } /** * Convenience getter for the id of the enclosing job. Useful for DAO implementations. 
* @return the id of the enclosing job */ public Long getJobId() { if (this.jobInstance != null) { return this.jobInstance.getId(); } return null; } /** * @return the exitCode for the job. */ public ExitStatus getExitStatus() { return this.exitStatus; } /** * @param exitStatus the exit status for the job. */ public void setExitStatus(ExitStatus exitStatus) { this.exitStatus = exitStatus; } /** * @return the Job that is executing. */ public JobInstanceEvent getJobInstance() { return this.jobInstance; } public void setJobInstance(JobInstanceEvent jobInstance) { this.jobInstance = jobInstance; } /** * Accessor for the step executions. * @return the step executions that were registered */ public Collection getStepExecutions() { return Collections.unmodifiableList(new ArrayList<>(this.stepExecutions)); } /** * Returns the {@link ExecutionContext} for this execution. The content is expected to * be persisted after each step completion (successful or not). * @return the context */ public ExecutionContext getExecutionContext() { return this.executionContext; } /** * Sets the {@link ExecutionContext} for this execution. * @param executionContext the context */ public void setExecutionContext(ExecutionContext executionContext) { this.executionContext = executionContext; } /** * @return the time when this execution was created. */ public LocalDateTime getCreateTime() { return this.createTime; } /** * @param createTime creation time of this execution. */ public void setCreateTime(LocalDateTime createTime) { this.createTime = createTime; } /** * Get the date representing the last time this JobExecution was updated in the * JobRepository. * @return Date representing the last time this JobExecution was updated. */ public LocalDateTime getLastUpdated() { return this.lastUpdated; } /** * Set the last time this {@link JobExecution} was updated. * @param lastUpdated The date the {@link JobExecution} was updated. 
*/ public void setLastUpdated(LocalDateTime lastUpdated) { this.lastUpdated = lastUpdated; } public List getFailureExceptions() { return this.failureExceptions; } /** * Add the provided throwable to the failure exception list. * @param t a {@link Throwable} to be added to the exception list. */ public synchronized void addFailureException(Throwable t) { this.failureExceptions.add(t); } /** * Return all failure causing exceptions for this JobExecution, including step * executions. * @return List<Throwable> containing all exceptions causing failure for this * JobExecution. */ public synchronized List getAllFailureExceptions() { Set allExceptions = new HashSet<>(this.failureExceptions); for (StepExecutionEvent stepExecution : this.stepExecutions) { allExceptions.addAll(stepExecution.getFailureExceptions()); } return new ArrayList<>(allExceptions); } /* * (non-Javadoc) * * @see org.springframework.batch.core.domain.Entity#toString() */ @Override public String toString() { return super.toString() + String.format( ", startTime=%s, endTime=%s, lastUpdated=%s, status=%s, exitStatus=%s, job=[%s], jobParameters=[%s]", this.startTime, this.endTime, this.lastUpdated, this.status, this.exitStatus, this.jobInstance, this.jobParameters); } } ================================================ FILE: spring-cloud-task-stream/src/main/java/org/springframework/cloud/task/batch/listener/support/JobInstanceEvent.java ================================================ /* * Copyright 2016-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.batch.listener.support; import org.springframework.batch.core.Entity; import org.springframework.util.Assert; /** * This is a JobInstance DTO created so that a * {@link org.springframework.batch.core.job.JobInstance} can be serialized into Json * without having to add mixins to an ObjectMapper. * * @author Glenn Renfro */ public class JobInstanceEvent extends Entity { private String jobName; public JobInstanceEvent() { super(-1L); } public JobInstanceEvent(Long id, String jobName) { super(id); Assert.hasLength(jobName, "jobName must have length greater than zero."); this.jobName = jobName; } /** * @return the job name. (Equivalent to getJob().getName()) */ public String getJobName() { return this.jobName; } public String toString() { return super.toString() + ", Job=[" + this.jobName + "]"; } public long getInstanceId() { return super.getId(); } } ================================================ FILE: spring-cloud-task-stream/src/main/java/org/springframework/cloud/task/batch/listener/support/JobParameterEvent.java ================================================ /* * Copyright 2016-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.springframework.cloud.task.batch.listener.support; import java.util.Date; import java.util.Objects; import org.springframework.batch.core.job.parameters.JobParameter; /** * This is a JobParameter DTO created so that a * {@link org.springframework.batch.core.job.parameters.JobParameter} can be serialized * into Json without having to add mixins to an ObjectMapper. * * @author Glenn Renfro */ public class JobParameterEvent { private Object parameter; private boolean identifying; public JobParameterEvent() { } public JobParameterEvent(JobParameter jobParameter) { this.parameter = jobParameter.value(); this.identifying = jobParameter.identifying(); } public boolean isIdentifying() { return this.identifying; } /** * @return the value contained within this JobParameter. */ public Object getValue() { if (this.parameter != null && this.parameter.getClass().isInstance(Date.class)) { return new Date(((Date) this.parameter).getTime()); } else { return this.parameter; } } @Override public boolean equals(Object obj) { if (!(obj instanceof JobParameterEvent)) { return false; } if (this == obj) { return true; } JobParameterEvent rhs = (JobParameterEvent) obj; return Objects.equals(this.parameter, rhs.parameter); } @Override public String toString() { return this.parameter == null ? null : this.parameter.toString(); } @Override public int hashCode() { final int BASE_HASH = 7; final int MULTIPLIER_HASH = 21; return BASE_HASH + MULTIPLIER_HASH * this.parameter.hashCode(); } } ================================================ FILE: spring-cloud-task-stream/src/main/java/org/springframework/cloud/task/batch/listener/support/JobParametersEvent.java ================================================ /* * Copyright 2017-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.batch.listener.support; import java.util.HashSet; import java.util.Set; import org.springframework.batch.core.job.parameters.JobParameter; /** * This is a JobParametersEvent DTO created so that a * {@link org.springframework.batch.core.job.parameters.JobParameters} can be serialized * into Json without having to add mixins to an ObjectMapper. * * @author Glenn Renfro */ public class JobParametersEvent { private final Set parameters; public JobParametersEvent() { this.parameters = new HashSet<>(); } public JobParametersEvent(Set> jobParameters) { this.parameters = new HashSet<>(); for (JobParameter entry : jobParameters) { this.parameters.add(new JobParameterEvent(entry)); } } /** * Get a map of all parameters, including string, long, and date. * @return an unmodifiable map containing all parameters. */ public Set getParameters() { return new HashSet<>(this.parameters); } /** * @return true if the parameters is empty, false otherwise. 
*/ public boolean isEmpty() { return this.parameters.isEmpty(); } @Override public boolean equals(Object obj) { if (!(obj instanceof JobParametersEvent)) { return false; } if (obj == this) { return true; } JobParametersEvent rhs = (JobParametersEvent) obj; return this.parameters.equals(rhs.parameters); } @Override public int hashCode() { final int BASE_HASH = 17; final int MULTIPLIER_HASH = 23; return BASE_HASH + MULTIPLIER_HASH * this.parameters.hashCode(); } @Override public String toString() { return this.parameters.toString(); } } ================================================ FILE: spring-cloud-task-stream/src/main/java/org/springframework/cloud/task/batch/listener/support/MessagePublisher.java ================================================ /* * Copyright 2016-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.batch.listener.support; import org.springframework.cloud.stream.function.StreamBridge; import org.springframework.messaging.Message; import org.springframework.messaging.support.MessageBuilder; import org.springframework.util.Assert; /** * Utility class that sends batch job listener payloads to the notification channel. * * @param

payload type * @author Glenn Renfro */ public class MessagePublisher

{ private final StreamBridge streamBridge; public MessagePublisher(StreamBridge streamBridge) { Assert.notNull(streamBridge, "streamBridge must not be null"); this.streamBridge = streamBridge; } public final void publish(String bindingName, P payload) { if (payload instanceof Message) { this.publishMessage(bindingName, (Message) payload); } else { Message

message = MessageBuilder.withPayload(payload).build(); this.streamBridge.send(bindingName, message); } } private void publishMessage(String bindingName, Message message) { this.streamBridge.send(bindingName, message); } public void publishWithThrowableHeader(String bindingName, P payload, String header) { Message

message = MessageBuilder.withPayload(payload) .setHeader(BatchJobHeaders.BATCH_EXCEPTION, header) .build(); publishMessage(bindingName, message); } } ================================================ FILE: spring-cloud-task-stream/src/main/java/org/springframework/cloud/task/batch/listener/support/StepExecutionEvent.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.batch.listener.support; import java.time.LocalDateTime; import java.util.List; import java.util.concurrent.CopyOnWriteArrayList; import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.Entity; import org.springframework.batch.core.step.StepExecution; import org.springframework.batch.infrastructure.item.ExecutionContext; import org.springframework.util.Assert; /** * This is a StepExecution DTO created so that a * {@link org.springframework.batch.core.step.StepExecution} can be serialized into Json * without having to add mixins to an ObjectMapper. 
* * @author Glenn Renfro */ public class StepExecutionEvent extends Entity { private long jobExecutionId; private String stepName; private BatchStatus status = BatchStatus.STARTING; private long readCount = 0; private long writeCount = 0; private long commitCount = 0; private long rollbackCount = 0; private long readSkipCount = 0; private long processSkipCount = 0; private long writeSkipCount = 0; private LocalDateTime startTime = LocalDateTime.now(); private LocalDateTime endTime = null; private LocalDateTime lastUpdated = null; private ExecutionContext executionContext = new ExecutionContext(); private ExitStatus exitStatus = new ExitStatus(org.springframework.batch.core.ExitStatus.EXECUTING); private boolean terminateOnly; private long filterCount; private List failureExceptions = new CopyOnWriteArrayList<>(); public StepExecutionEvent() { super(0); } /** * Constructor for the StepExecution to initialize the DTO. * @param stepExecution the StepExecution to build this DTO around. */ public StepExecutionEvent(StepExecution stepExecution) { super(stepExecution.getJobExecutionId()); Assert.notNull(stepExecution, "StepExecution must be provided to re-hydrate an existing StepExecutionEvent"); Assert.notNull(stepExecution.getJobExecution(), "JobExecution must be provided to re-hydrate an existing StepExecutionEvent"); this.jobExecutionId = stepExecution.getJobExecutionId(); this.stepName = stepExecution.getStepName(); this.status = stepExecution.getStatus(); this.exitStatus = new ExitStatus(stepExecution.getExitStatus()); this.executionContext = stepExecution.getExecutionContext(); for (Throwable throwable : stepExecution.getFailureExceptions()) { this.failureExceptions.add(throwable); } this.terminateOnly = stepExecution.isTerminateOnly(); this.endTime = stepExecution.getEndTime(); this.lastUpdated = stepExecution.getLastUpdated(); this.startTime = stepExecution.getStartTime(); this.commitCount = stepExecution.getCommitCount(); this.filterCount = 
stepExecution.getFilterCount(); this.processSkipCount = stepExecution.getProcessSkipCount(); this.readCount = stepExecution.getReadCount(); this.readSkipCount = stepExecution.getReadSkipCount(); this.rollbackCount = stepExecution.getRollbackCount(); this.writeCount = stepExecution.getWriteCount(); this.writeSkipCount = stepExecution.getWriteSkipCount(); } /** * Returns the {@link ExecutionContext} for this execution. * @return the attributes */ public ExecutionContext getExecutionContext() { return this.executionContext; } /** * Sets the {@link ExecutionContext} for this execution. * @param executionContext the attributes */ public void setExecutionContext(ExecutionContext executionContext) { this.executionContext = executionContext; } /** * Returns the current number of commits for this execution. * @return the current number of commits */ public long getCommitCount() { return this.commitCount; } /** * Sets the current number of commits for this execution. * @param commitCount the current number of commits */ public void setCommitCount(int commitCount) { this.commitCount = commitCount; } /** * Returns the time that this execution ended. * @return the time that this execution ended */ public LocalDateTime getEndTime() { return this.endTime; } /** * Sets the time that this execution ended. * @param endTime the time that this execution ended */ public void setEndTime(LocalDateTime endTime) { this.endTime = endTime; } /** * Returns the current number of items read for this execution. * @return the current number of items read for this execution */ public long getReadCount() { return this.readCount; } /** * Sets the current number of read items for this execution. * @param readCount the current number of read items for this execution */ public void setReadCount(int readCount) { this.readCount = readCount; } /** * Returns the current number of items written for this execution. 
* @return the current number of items written for this execution */ public long getWriteCount() { return this.writeCount; } /** * Sets the current number of written items for this execution. * @param writeCount the current number of written items for this execution */ public void setWriteCount(int writeCount) { this.writeCount = writeCount; } /** * Returns the current number of rollbacks for this execution. * @return the current number of rollbacks for this execution */ public long getRollbackCount() { return this.rollbackCount; } /** * Setter for number of rollbacks for this execution. * @param rollbackCount the number of rollbacks for this execution */ public void setRollbackCount(int rollbackCount) { this.rollbackCount = rollbackCount; } /** * Returns the current number of items filtered out of this execution. * @return the current number of items filtered out of this execution */ public long getFilterCount() { return this.filterCount; } /** * Public setter for the number of items filtered out of this execution. * @param filterCount the number of items filtered out of this execution to set */ public void setFilterCount(int filterCount) { this.filterCount = filterCount; } /** * Gets the time this execution started. * @return the time this execution started */ public LocalDateTime getStartTime() { return this.startTime; } /** * Sets the time this execution started. * @param startTime the time this execution started */ public void setStartTime(LocalDateTime startTime) { this.startTime = startTime; } /** * Returns the current status of this step. * @return the current status of this step */ public BatchStatus getStatus() { return this.status; } /** * Sets the current status of this step. * @param status the current status of this step */ public void setStatus(BatchStatus status) { this.status = status; } /** * @return the name of the step. 
*/ public String getStepName() { return this.stepName; } public void setStepName(String stepName) { this.stepName = stepName; } /** * @return the exitCode */ public ExitStatus getExitStatus() { return this.exitStatus; } /** * @param exitStatus the {@link ExitStatus} for the step. */ public void setExitStatus(ExitStatus exitStatus) { this.exitStatus = exitStatus; } /** * @return flag to indicate that an execution should halt */ public boolean isTerminateOnly() { return this.terminateOnly; } /** * Set a flag that will signal to an execution environment that this execution (and * its surrounding job) wishes to exit. */ public void setTerminateOnly() { this.terminateOnly = true; } /** * @return the total number of items skipped. */ public long getSkipCount() { return this.readSkipCount + this.processSkipCount + this.writeSkipCount; } /** * Increment the number of commits. */ public void incrementCommitCount() { this.commitCount++; } /** * @return the number of records skipped on read. */ public long getReadSkipCount() { return this.readSkipCount; } /** * Set the number of records skipped on read. * @param readSkipCount the number of records to be skipped on read. */ public void setReadSkipCount(int readSkipCount) { this.readSkipCount = readSkipCount; } /** * @return the number of records skipped on write */ public long getWriteSkipCount() { return this.writeSkipCount; } /** * Set the number of records skipped on write. * @param writeSkipCount the number of records to be skipped on write. */ public void setWriteSkipCount(int writeSkipCount) { this.writeSkipCount = writeSkipCount; } /** * @return the number of records skipped during processing */ public long getProcessSkipCount() { return this.processSkipCount; } /** * Set the number of records skipped during processing. * @param processSkipCount the number of records skip during processing. 
*/ public void setProcessSkipCount(int processSkipCount) { this.processSkipCount = processSkipCount; } /** * @return the Date representing the last time this execution was persisted. */ public LocalDateTime getLastUpdated() { return this.lastUpdated; } /** * Set the time when the StepExecution was last updated before persisting. * @param lastUpdated the {@link LocalDateTime} the StepExecution was last updated. */ public void setLastUpdated(LocalDateTime lastUpdated) { this.lastUpdated = lastUpdated; } public List getFailureExceptions() { return this.failureExceptions; } public long getJobExecutionId() { return this.jobExecutionId; } /* * (non-Javadoc) * * @see org.springframework.batch.container.common.domain.Entity#equals(java. * lang.Object) */ @Override public boolean equals(Object obj) { if (!(obj instanceof StepExecution)) { return super.equals(obj); } StepExecution other = (StepExecution) obj; return this.stepName.equals(other.getStepName()) && (this.jobExecutionId == other.getJobExecutionId()) && getId() == other.getId(); } /* * (non-Javadoc) * * @see org.springframework.batch.container.common.domain.Entity#hashCode() */ @Override public int hashCode() { Object jobExecutionId = getJobExecutionId(); Long id = getId(); return super.hashCode() + 31 * (this.stepName != null ? this.stepName.hashCode() : 0) + 91 * (jobExecutionId != null ? jobExecutionId.hashCode() : 0) + 59 * (id != null ? 
id.hashCode() : 0); } @Override public String toString() { return String.format(getSummary() + ", exitDescription=%s", this.exitStatus.getExitDescription()); } public String getSummary() { return super.toString() + String.format( ", name=%s, status=%s, exitStatus=%s, readCount=%d, " + "filterCount=%d, writeCount=%d readSkipCount=%d, writeSkipCount=%d" + ", processSkipCount=%d, commitCount=%d, rollbackCount=%d", this.stepName, this.status, this.exitStatus.getExitCode(), this.readCount, this.filterCount, this.writeCount, this.readSkipCount, this.writeSkipCount, this.processSkipCount, this.commitCount, this.rollbackCount); } } ================================================ FILE: spring-cloud-task-stream/src/main/java/org/springframework/cloud/task/batch/listener/support/TaskBatchEventListenerBeanPostProcessor.java ================================================ /* * Copyright 2016-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
 */

package org.springframework.cloud.task.batch.listener.support;

import java.lang.reflect.Field;

import org.springframework.aot.hint.MemberCategory;
import org.springframework.aot.hint.RuntimeHints;
import org.springframework.aot.hint.RuntimeHintsRegistrar;
import org.springframework.batch.core.job.AbstractJob;
import org.springframework.batch.core.listener.ChunkListener;
import org.springframework.batch.core.listener.ItemProcessListener;
import org.springframework.batch.core.listener.ItemReadListener;
import org.springframework.batch.core.listener.ItemWriteListener;
import org.springframework.batch.core.listener.JobExecutionListener;
import org.springframework.batch.core.listener.SkipListener;
import org.springframework.batch.core.listener.StepExecutionListener;
import org.springframework.batch.core.step.AbstractStep;
import org.springframework.batch.core.step.item.ChunkOrientedTasklet;
import org.springframework.batch.core.step.item.SimpleChunkProcessor;
import org.springframework.batch.core.step.item.SimpleChunkProvider;
import org.springframework.batch.core.step.tasklet.Tasklet;
import org.springframework.batch.core.step.tasklet.TaskletStep;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.BeanPostProcessor;
import org.springframework.cloud.task.batch.listener.BatchEventAutoConfiguration;
import org.springframework.cloud.task.batch.listener.support.TaskBatchEventListenerBeanPostProcessor.RuntimeHint;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.ImportRuntimeHints;
import org.springframework.util.ReflectionUtils;

/**
 * Attaches the listeners to the job and its steps. Based on the type of bean that is
 * being processed will determine what listener is attached.
 * <ul>
 * <li>If the bean is of type AbstractJob then the JobExecutionListener is registered
 * with this bean.</li>
 * <li>If the bean is of type AbstractStep then the StepExecutionListener is registered
 * with this bean.</li>
 * <li>If the bean is of type TaskletStep then the ChunkEventListener is registered with
 * this bean.</li>
 * <li>If the tasklet for the TaskletStep is of type ChunkOrientedTasklet the following
 * listeners will be registered:
 * <ul>
 * <li>ItemReadListener with the ChunkProvider.</li>
 * <li>ItemProcessListener with the ChunkProcessor.</li>
 * <li>ItemWriteEventsListener with the ChunkProcessor.</li>
 * <li>SkipEventsListener with the ChunkProcessor.</li>
 * </ul>
 * </li>
 * </ul>
 *
 * @author Michael Minella
 * @author Glenn Renfro
 */
@ImportRuntimeHints(RuntimeHint.class)
public class TaskBatchEventListenerBeanPostProcessor implements BeanPostProcessor {

	@Autowired
	private ApplicationContext applicationContext;

	/**
	 * Inspects each bean before initialization and, for batch job/step beans, wires in
	 * the event listener beans published by {@link BatchEventAutoConfiguration}.
	 * @param bean the bean being processed
	 * @param beanName the name of the bean
	 * @return the same bean instance, possibly with listeners registered on it
	 */
	@Override
	public Object postProcessBeforeInitialization(Object bean, String beanName) throws BeansException {
		registerJobExecutionEventListener(bean);
		if (bean instanceof AbstractStep) {
			registerStepExecutionEventListener(bean);
			if (bean instanceof TaskletStep taskletStep) {
				Tasklet tasklet = taskletStep.getTasklet();
				registerChunkEventsListener(bean);
				if (tasklet instanceof ChunkOrientedTasklet) {
					// The chunk provider/processor are private fields of
					// ChunkOrientedTasklet with no public accessors, so reflection is
					// the only way to reach them to register item-level listeners.
					// (RuntimeHint below keeps this working under AOT/native images.)
					Field chunkProviderField = ReflectionUtils.findField(ChunkOrientedTasklet.class, "chunkProvider");
					ReflectionUtils.makeAccessible(chunkProviderField);
					SimpleChunkProvider chunkProvider = (SimpleChunkProvider) ReflectionUtils
						.getField(chunkProviderField, tasklet);
					Field chunkProcessorField = ReflectionUtils.findField(ChunkOrientedTasklet.class, "chunkProcessor");
					ReflectionUtils.makeAccessible(chunkProcessorField);
					SimpleChunkProcessor chunkProcessor = (SimpleChunkProcessor) ReflectionUtils
						.getField(chunkProcessorField, tasklet);
					// Read/skip events attach to the provider; process/write/skip
					// events attach to the processor.
					registerItemReadEvents(chunkProvider);
					registerSkipEvents(chunkProvider);
					registerItemProcessEvents(chunkProcessor);
					registerItemWriteEvents(chunkProcessor);
					registerSkipEvents(chunkProcessor);
				}
			}
		}
		return bean;
	}

	@Override
	public Object postProcessAfterInitialization(Object bean, String beanName) throws BeansException {
		return bean;
	}

	// Each register* method is a no-op when the corresponding listener bean is not
	// present in the context (event publishing for that type is disabled).

	private void registerItemProcessEvents(SimpleChunkProcessor chunkProcessor) {
		if (this.applicationContext.containsBean(BatchEventAutoConfiguration.ITEM_PROCESS_EVENTS_LISTENER)) {
			chunkProcessor.registerListener((ItemProcessListener) this.applicationContext
				.getBean(BatchEventAutoConfiguration.ITEM_PROCESS_EVENTS_LISTENER));
		}
	}

	private void registerItemReadEvents(SimpleChunkProvider chunkProvider) {
		if (this.applicationContext.containsBean(BatchEventAutoConfiguration.ITEM_READ_EVENTS_LISTENER)) {
			chunkProvider.registerListener((ItemReadListener) this.applicationContext
				.getBean(BatchEventAutoConfiguration.ITEM_READ_EVENTS_LISTENER));
		}
	}

	private void registerItemWriteEvents(SimpleChunkProcessor chunkProcessor) {
		if (this.applicationContext.containsBean(BatchEventAutoConfiguration.ITEM_WRITE_EVENTS_LISTENER)) {
			chunkProcessor.registerListener((ItemWriteListener) this.applicationContext
				.getBean(BatchEventAutoConfiguration.ITEM_WRITE_EVENTS_LISTENER));
		}
	}

	private void registerSkipEvents(SimpleChunkProvider chunkProvider) {
		if (this.applicationContext.containsBean(BatchEventAutoConfiguration.SKIP_EVENTS_LISTENER)) {
			chunkProvider.registerListener(
					(SkipListener) this.applicationContext.getBean(BatchEventAutoConfiguration.SKIP_EVENTS_LISTENER));
		}
	}

	private void registerSkipEvents(SimpleChunkProcessor chunkProcessor) {
		if (this.applicationContext.containsBean(BatchEventAutoConfiguration.SKIP_EVENTS_LISTENER)) {
			chunkProcessor.registerListener(
					(SkipListener) this.applicationContext.getBean(BatchEventAutoConfiguration.SKIP_EVENTS_LISTENER));
		}
	}

	private void registerChunkEventsListener(Object bean) {
		if (this.applicationContext.containsBean(BatchEventAutoConfiguration.CHUNK_EVENTS_LISTENER)) {
			((TaskletStep) bean).registerChunkListener(
					(ChunkListener) this.applicationContext.getBean(BatchEventAutoConfiguration.CHUNK_EVENTS_LISTENER));
		}
	}

	private void registerJobExecutionEventListener(Object bean) {
		if (bean instanceof AbstractJob job
				&& this.applicationContext.containsBean(BatchEventAutoConfiguration.JOB_EXECUTION_EVENTS_LISTENER)) {
			JobExecutionListener jobExecutionEventsListener = (JobExecutionListener) this.applicationContext
				.getBean(BatchEventAutoConfiguration.JOB_EXECUTION_EVENTS_LISTENER);
			job.registerJobExecutionListener(jobExecutionEventsListener);
		}
	}

	private void registerStepExecutionEventListener(Object bean) {
		if (this.applicationContext.containsBean(BatchEventAutoConfiguration.STEP_EXECUTION_EVENTS_LISTENER)) {
			StepExecutionListener stepExecutionListener = (StepExecutionListener) this.applicationContext
				.getBean(BatchEventAutoConfiguration.STEP_EXECUTION_EVENTS_LISTENER);
			AbstractStep step = (AbstractStep) bean;
			step.registerStepExecutionListener(stepExecutionListener);
		}
	}

	/**
	 * Registers the reflection hint that allows the private-field access above to work
	 * in AOT-processed / native-image applications.
	 */
	static class RuntimeHint implements RuntimeHintsRegistrar {

		@Override
		public void registerHints(RuntimeHints hints, ClassLoader classLoader) {
			hints.reflection().registerType(ChunkOrientedTasklet.class, MemberCategory.DECLARED_FIELDS);
		}

	}

}
*/ private int jobExecutionOrder = Ordered.LOWEST_PRECEDENCE; /** * Establishes the default {@link Ordered} precedence for * {@link org.springframework.batch.core.StepExecutionListener}. */ private int stepExecutionOrder = Ordered.LOWEST_PRECEDENCE; /** * Establishes the default {@link Ordered} precedence for * {@link org.springframework.batch.core.ItemReadListener}. */ private int itemReadOrder = Ordered.LOWEST_PRECEDENCE; /** * Establishes the default {@link Ordered} precedence for * {@link org.springframework.batch.core.ItemProcessListener}. */ private int itemProcessOrder = Ordered.LOWEST_PRECEDENCE; /** * Establishes the default {@link Ordered} precedence for * {@link org.springframework.batch.core.ItemWriteListener}. */ private int itemWriteOrder = Ordered.LOWEST_PRECEDENCE; /** * Establishes the default {@link Ordered} precedence for * {@link org.springframework.batch.core.ChunkListener}. */ private int chunkOrder = Ordered.LOWEST_PRECEDENCE; /** * Establishes the default {@link Ordered} precedence for * {@link org.springframework.batch.core.SkipListener}. 
*/ private int skipOrder = Ordered.LOWEST_PRECEDENCE; private String jobExecutionEventBindingName = "job-execution-events"; private String skipEventBindingName = "skip-events"; private String chunkEventBindingName = "chunk-events"; private String itemProcessEventBindingName = "item-process-events"; private String itemReadEventBindingName = "item-read-events"; private String itemWriteEventBindingName = "item-write-events"; private String stepExecutionEventBindingName = "step-execution-events"; private String taskEventBindingName = "task-events"; public int getJobExecutionOrder() { return this.jobExecutionOrder; } public void setJobExecutionOrder(int jobExecutionOrder) { this.jobExecutionOrder = jobExecutionOrder; } public int getStepExecutionOrder() { return this.stepExecutionOrder; } public void setStepExecutionOrder(int stepExecutionOrder) { this.stepExecutionOrder = stepExecutionOrder; } public int getItemReadOrder() { return this.itemReadOrder; } public void setItemReadOrder(int itemReadOrder) { this.itemReadOrder = itemReadOrder; } public int getItemProcessOrder() { return this.itemProcessOrder; } public void setItemProcessOrder(int itemProcessOrder) { this.itemProcessOrder = itemProcessOrder; } public int getItemWriteOrder() { return this.itemWriteOrder; } public void setItemWriteOrder(int itemWriteOrder) { this.itemWriteOrder = itemWriteOrder; } public int getChunkOrder() { return this.chunkOrder; } public void setChunkOrder(int chunkOrder) { this.chunkOrder = chunkOrder; } public int getSkipOrder() { return this.skipOrder; } public void setSkipOrder(int skipOrder) { this.skipOrder = skipOrder; } public String getJobExecutionEventBindingName() { return jobExecutionEventBindingName; } public void setJobExecutionEventBindingName(String jobExecutionEventBindingName) { this.jobExecutionEventBindingName = jobExecutionEventBindingName; } public String getSkipEventBindingName() { return skipEventBindingName; } public void setSkipEventBindingName(String 
skipEventBindingName) { this.skipEventBindingName = skipEventBindingName; } public String getChunkEventBindingName() { return chunkEventBindingName; } public void setChunkEventBindingName(String chunkEventBindingName) { this.chunkEventBindingName = chunkEventBindingName; } public String getItemProcessEventBindingName() { return itemProcessEventBindingName; } public void setItemProcessEventBindingName(String itemProcessEventBindingName) { this.itemProcessEventBindingName = itemProcessEventBindingName; } public String getItemReadEventBindingName() { return itemReadEventBindingName; } public void setItemReadEventBindingName(String itemReadEventBindingName) { this.itemReadEventBindingName = itemReadEventBindingName; } public String getItemWriteEventBindingName() { return itemWriteEventBindingName; } public void setItemWriteEventBindingName(String itemWriteEventBindingName) { this.itemWriteEventBindingName = itemWriteEventBindingName; } public String getStepExecutionEventBindingName() { return stepExecutionEventBindingName; } public void setStepExecutionEventBindingName(String stepExecutionEventBindingName) { this.stepExecutionEventBindingName = stepExecutionEventBindingName; } public String getTaskEventBindingName() { return taskEventBindingName; } public void setTaskEventBindingName(String taskEventBindingName) { this.taskEventBindingName = taskEventBindingName; } } ================================================ FILE: spring-cloud-task-stream/src/main/java/org/springframework/cloud/task/batch/listener/support/package-info.java ================================================ /* * Copyright 2015-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Support classes for stream-based batch listener components in Spring Cloud Task. */ package org.springframework.cloud.task.batch.listener.support; ================================================ FILE: spring-cloud-task-stream/src/main/java/org/springframework/cloud/task/listener/TaskEventAutoConfiguration.java ================================================ /* * Copyright 2016-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
 */

package org.springframework.cloud.task.listener;

import org.springframework.boot.autoconfigure.AutoConfiguration;
import org.springframework.boot.autoconfigure.AutoConfigureAfter;
import org.springframework.boot.autoconfigure.AutoConfigureBefore;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.autoconfigure.condition.ConditionalOnExpression;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.cloud.stream.config.BindingServiceConfiguration;
import org.springframework.cloud.stream.function.StreamBridge;
import org.springframework.cloud.task.batch.listener.support.TaskEventProperties;
import org.springframework.cloud.task.configuration.SimpleTaskAutoConfiguration;
import org.springframework.cloud.task.repository.TaskExecution;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.PropertySource;

/**
 * Auto-configuration that publishes {@link TaskExecution} lifecycle callbacks (startup,
 * end, failure) as messages on the task-event output binding resolved from
 * {@link TaskEventProperties}, using Spring Cloud Stream's {@link StreamBridge}.
 *
 * @author Michael Minella
 * @author Glenn Renfro
 */
@AutoConfiguration
// Only applies when Spring Cloud Stream's StreamBridge is on the classpath.
@ConditionalOnClass(StreamBridge.class)
// Requires a running task context: the task lifecycle listener bean must already exist.
@ConditionalOnBean(TaskLifecycleListener.class)
// Suppressed when 'spring.batch.job.jobName' is set.
// NOTE(review): assumes an empty job name means "not launching a named batch job" —
// confirm this is the intended gate rather than a batch-presence check.
@ConditionalOnExpression("T(org.springframework.util.StringUtils).isEmpty('${spring.batch.job.jobName:}')")
// @checkstyle:off
@ConditionalOnProperty(prefix = "spring.cloud.task.events", name = "enabled", havingValue = "true", matchIfMissing = true)
// @checkstyle:on
// Supplies default contentType settings for the task/batch event bindings.
@PropertySource("classpath:/org/springframework/cloud/task/application.properties")
@AutoConfigureBefore(BindingServiceConfiguration.class)
@AutoConfigureAfter(SimpleTaskAutoConfiguration.class)
@EnableConfigurationProperties(TaskEventProperties.class)
public class TaskEventAutoConfiguration {

	/**
	 * Configuration for a {@link TaskExecutionListener}.
	 */
	// NOTE(review): @AutoConfiguration on a nested class is unusual — plain
	// @Configuration is the documented idiom for nested configuration classes;
	// confirm this is intentional.
	@AutoConfiguration
	public static class ListenerConfiguration {

		/**
		 * Creates the listener that forwards each task lifecycle callback to the
		 * task-event binding.
		 * @param streamBridge bridge used to send the event payload
		 * @param taskEventProperties supplies the output binding name
		 * @return an event-emitting {@link TaskExecutionListener}
		 */
		@Bean
		public TaskExecutionListener taskEventEmitter(StreamBridge streamBridge,
				TaskEventProperties taskEventProperties) {
			return new TaskExecutionListener() {

				@Override
				public void onTaskStartup(TaskExecution taskExecution) {
					streamBridge.send(taskEventProperties.getTaskEventBindingName(), taskExecution);
				}

				@Override
				public void onTaskEnd(TaskExecution taskExecution) {
					streamBridge.send(taskEventProperties.getTaskEventBindingName(), taskExecution);
				}

				@Override
				public void onTaskFailed(TaskExecution taskExecution, Throwable throwable) {
					// Only the TaskExecution is published; the throwable itself is not
					// sent downstream.
					streamBridge.send(taskEventProperties.getTaskEventBindingName(), taskExecution);
				}

			};
		}

	}

}

================================================
FILE: spring-cloud-task-stream/src/main/java/org/springframework/cloud/task/listener/package-info.java
================================================
/*
 * Copyright 2015-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/**
 * Stream-based task lifecycle listener support for Spring Cloud Task.
 */
package org.springframework.cloud.task.listener;

================================================
FILE: spring-cloud-task-stream/src/main/resources/META-INF/additional-spring-configuration-metadata.json
================================================
{
  "properties": [
    {
      "defaultValue": true,
      "name": "spring.cloud.task.events.enabled",
      "description": "This property is used to determine if a task app should emit task events.",
      "type": "java.lang.Boolean"
    }
  ]
}

================================================
FILE: spring-cloud-task-stream/src/main/resources/META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports
================================================
org.springframework.cloud.task.listener.TaskEventAutoConfiguration
org.springframework.cloud.task.batch.listener.BatchEventAutoConfiguration

================================================
FILE: spring-cloud-task-stream/src/main/resources/org/springframework/cloud/task/application.properties
================================================
spring.cloud.stream.bindings.task-events.contentType=application/json
spring.cloud.stream.bindings.item-write-events.contentType=application/json
spring.cloud.stream.bindings.item-read-events.contentType=application/json
spring.cloud.stream.bindings.item-process-events.contentType=application/json
spring.cloud.stream.bindings.skip-events.contentType=application/json
spring.cloud.stream.bindings.step-execution-events.contentType=application/json
spring.cloud.stream.bindings.job-execution-events.contentType=application/json
spring.cloud.stream.bindings.chunk-events.contentType=application/json

================================================
FILE: spring-cloud-task-stream/src/test/java/org/springframework/cloud/task/batch/listener/EventListenerTests.java
================================================
/*
 * Copyright 2016-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.batch.listener; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.UUID; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import tools.jackson.databind.DeserializationFeature; import tools.jackson.databind.ObjectMapper; import tools.jackson.databind.json.JsonMapper; import org.springframework.batch.core.job.JobExecution; import org.springframework.batch.core.job.JobInstance; import org.springframework.batch.core.job.parameters.JobParameters; import org.springframework.batch.core.scope.context.ChunkContext; import org.springframework.batch.core.scope.context.StepContext; import org.springframework.batch.core.step.StepExecution; import org.springframework.batch.infrastructure.item.Chunk; import org.springframework.boot.WebApplicationType; import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.boot.builder.SpringApplicationBuilder; import org.springframework.cloud.stream.binder.test.OutputDestination; import org.springframework.cloud.stream.binder.test.TestChannelBinderConfiguration; import org.springframework.cloud.stream.function.StreamBridge; import org.springframework.cloud.task.batch.listener.support.JobExecutionEvent; import org.springframework.cloud.task.batch.listener.support.MessagePublisher; import org.springframework.cloud.task.batch.listener.support.StepExecutionEvent; import org.springframework.cloud.task.batch.listener.support.TaskEventProperties; import 
org.springframework.context.ConfigurableApplicationContext; import org.springframework.core.Ordered; import org.springframework.messaging.Message; import static org.assertj.core.api.Assertions.assertThat; /** * @author Glenn Renfro * @author Ali Shahbour */ public class EventListenerTests { private EventEmittingSkipListener eventEmittingSkipListener; private EventEmittingItemProcessListener eventEmittingItemProcessListener; private EventEmittingItemReadListener eventEmittingItemReadListener; private EventEmittingItemWriteListener eventEmittingItemWriteListener; private EventEmittingJobExecutionListener eventEmittingJobExecutionListener; private EventEmittingStepExecutionListener eventEmittingStepExecutionListener; private EventEmittingChunkListener eventEmittingChunkListener; private ConfigurableApplicationContext applicationContext; private final TaskEventProperties taskEventProperties = new TaskEventProperties(); private ObjectMapper objectMapper = new ObjectMapper(); @BeforeEach public void beforeTests() { objectMapper = JsonMapper.builder().disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES).build(); this.applicationContext = new SpringApplicationBuilder() .sources(TestChannelBinderConfiguration.getCompleteConfiguration(BatchEventsApplication.class)) .web(WebApplicationType.NONE) .build() .run(); StreamBridge streamBridge = this.applicationContext.getBean(StreamBridge.class); MessagePublisher messagePublisher = new MessagePublisher(streamBridge); this.eventEmittingSkipListener = new EventEmittingSkipListener(messagePublisher, this.taskEventProperties); this.eventEmittingItemProcessListener = new EventEmittingItemProcessListener(messagePublisher, this.taskEventProperties); this.eventEmittingItemReadListener = new EventEmittingItemReadListener(messagePublisher, this.taskEventProperties); this.eventEmittingItemWriteListener = new EventEmittingItemWriteListener(messagePublisher, this.taskEventProperties); this.eventEmittingJobExecutionListener = new 
EventEmittingJobExecutionListener(messagePublisher, this.taskEventProperties); this.eventEmittingStepExecutionListener = new EventEmittingStepExecutionListener(messagePublisher, this.taskEventProperties); this.eventEmittingChunkListener = new EventEmittingChunkListener(messagePublisher, 0, this.taskEventProperties); } @AfterEach public void tearDown() { if (this.applicationContext != null && this.applicationContext.isActive()) { this.applicationContext.close(); } } @Test public void testEventListenerOrderProperty() { assertThat(Ordered.LOWEST_PRECEDENCE).isEqualTo(this.eventEmittingSkipListener.getOrder()); assertThat(Ordered.LOWEST_PRECEDENCE).isEqualTo(this.eventEmittingItemProcessListener.getOrder()); assertThat(Ordered.LOWEST_PRECEDENCE).isEqualTo(this.eventEmittingItemReadListener.getOrder()); assertThat(Ordered.LOWEST_PRECEDENCE).isEqualTo(this.eventEmittingItemWriteListener.getOrder()); assertThat(Ordered.LOWEST_PRECEDENCE).isEqualTo(this.eventEmittingJobExecutionListener.getOrder()); assertThat(Ordered.LOWEST_PRECEDENCE).isEqualTo(this.eventEmittingStepExecutionListener.getOrder()); assertThat(0).isEqualTo(this.eventEmittingChunkListener.getOrder()); } @Test public void testItemProcessListenerOnProcessorError() { this.eventEmittingItemProcessListener.onProcessError("HELLO", new RuntimeException("Test Exception")); assertThat(getStringFromDestination(this.taskEventProperties.getItemProcessEventBindingName())) .isEqualTo("Exception while item was being processed"); } @Test public void testItemProcessListenerAfterProcess() { this.eventEmittingItemProcessListener.afterProcess("HELLO_AFTER_PROCESS_EQUAL", "HELLO_AFTER_PROCESS_EQUAL"); assertThat(getStringFromDestination(this.taskEventProperties.getItemProcessEventBindingName())) .isEqualTo("item equaled result after processing"); this.eventEmittingItemProcessListener.afterProcess("HELLO_NOT_EQUAL", "WORLD"); assertThat(getStringFromDestination(this.taskEventProperties.getItemProcessEventBindingName())) 
.isEqualTo("item did not equal result after processing"); this.eventEmittingItemProcessListener.afterProcess("HELLO_AFTER_PROCESS", null); assertThat(getStringFromDestination(this.taskEventProperties.getItemProcessEventBindingName())) .isEqualTo("1 item was filtered"); } @Test public void testItemProcessBeforeProcessor() { this.eventEmittingItemProcessListener.beforeProcess("HELLO_BEFORE_PROCESS"); assertNoMessageFromDestination(this.taskEventProperties.getItemProcessEventBindingName()); } @Test public void EventEmittingSkipListenerSkipRead() { this.eventEmittingSkipListener.onSkipInRead(new RuntimeException("Text Exception")); assertThat(getStringFromDestination(this.taskEventProperties.getSkipEventBindingName())) .isEqualTo("Skipped when reading."); } @Test public void EventEmittingSkipListenerSkipWrite() { final String MESSAGE = "\"HELLO_SKIP_WRITE\""; this.eventEmittingSkipListener.onSkipInWrite(MESSAGE, new RuntimeException("Text Exception")); assertThat(getStringFromDestination(this.taskEventProperties.getSkipEventBindingName())).isEqualTo(MESSAGE); } @Test public void EventEmittingSkipListenerSkipProcess() { final String MESSAGE = "\"HELLO_SKIP_PROCESS\""; this.eventEmittingSkipListener.onSkipInProcess(MESSAGE, new RuntimeException("Text Exception")); assertThat(getStringFromDestination(this.taskEventProperties.getSkipEventBindingName())).isEqualTo(MESSAGE); } @Test public void EventEmittingItemReadListener() { this.eventEmittingItemReadListener.onReadError(new RuntimeException("Text Exception")); assertThat(getStringFromDestination(this.taskEventProperties.getItemReadEventBindingName())) .isEqualTo("Exception while item was being read"); } @Test public void EventEmittingItemReadListenerBeforeRead() { this.eventEmittingItemReadListener.beforeRead(); assertNoMessageFromDestination(this.taskEventProperties.getItemReadEventBindingName()); } @Test public void EventEmittingItemReadListenerAfterRead() { 
this.eventEmittingItemReadListener.afterRead("HELLO_AFTER_READ"); assertNoMessageFromDestination(this.taskEventProperties.getItemReadEventBindingName()); } @Test public void EventEmittingItemWriteListenerBeforeWrite() { this.eventEmittingItemWriteListener.beforeWrite(getSampleList()); assertThat(getStringFromDestination(this.taskEventProperties.getItemWriteEventBindingName())) .isEqualTo("3 items to be written."); } @Test public void EventEmittingItemWriteListenerAfterWrite() { this.eventEmittingItemWriteListener.afterWrite(getSampleList()); assertThat(getStringFromDestination(this.taskEventProperties.getItemWriteEventBindingName())) .isEqualTo("3 items have been written."); } @Test public void EventEmittingItemWriteListenerWriteError() { RuntimeException exception = new RuntimeException("Text Exception"); this.eventEmittingItemWriteListener.onWriteError(exception, getSampleList()); assertThat(getStringFromDestination(this.taskEventProperties.getItemWriteEventBindingName())) .isEqualTo("Exception while 3 items are attempted to be written."); } @Test public void EventEmittingJobExecutionListenerBeforeJob() throws IOException { JobExecution jobExecution = getJobExecution(); this.eventEmittingJobExecutionListener.beforeJob(jobExecution); List> result = testListener(this.taskEventProperties.getJobExecutionEventBindingName(), 1); assertThat(result.get(0)).isNotNull(); JobExecutionEvent jobEvent = this.objectMapper.readValue(result.get(0).getPayload(), JobExecutionEvent.class); assertThat(jobEvent.getJobInstance().getJobName()).isEqualTo(jobExecution.getJobInstance().getJobName()); } @Test public void EventEmittingJobExecutionListenerAfterJob() throws IOException { JobExecution jobExecution = getJobExecution(); this.eventEmittingJobExecutionListener.afterJob(jobExecution); List> result = testListener(this.taskEventProperties.getJobExecutionEventBindingName(), 1); assertThat(result.get(0)).isNotNull(); JobExecutionEvent jobEvent = 
this.objectMapper.readValue(result.get(0).getPayload(), JobExecutionEvent.class); assertThat(jobEvent.getJobInstance().getJobName()).isEqualTo(jobExecution.getJobInstance().getJobName()); } @Test public void EventEmittingStepExecutionListenerBeforeStep() throws IOException { final String STEP_MESSAGE = "BEFORE_STEP_MESSAGE"; StepExecution stepExecution = new StepExecution(STEP_MESSAGE, getJobExecution()); this.eventEmittingStepExecutionListener.beforeStep(stepExecution); List> result = testListener(this.taskEventProperties.getStepExecutionEventBindingName(), 1); assertThat(result.get(0)).isNotNull(); StepExecutionEvent stepExecutionEvent = this.objectMapper.readValue(result.get(0).getPayload(), StepExecutionEvent.class); assertThat(stepExecutionEvent.getStepName()).isEqualTo(STEP_MESSAGE); } @Test public void EventEmittingStepExecutionListenerAfterStep() throws IOException { final String STEP_MESSAGE = "AFTER_STEP_MESSAGE"; StepExecution stepExecution = new StepExecution(STEP_MESSAGE, getJobExecution()); this.eventEmittingStepExecutionListener.afterStep(stepExecution); List> result = testListener(this.taskEventProperties.getStepExecutionEventBindingName(), 1); assertThat(result.get(0)).isNotNull(); StepExecutionEvent stepExecutionEvent = this.objectMapper.readValue(result.get(0).getPayload(), StepExecutionEvent.class); assertThat(stepExecutionEvent.getStepName()).isEqualTo(STEP_MESSAGE); } @Test public void EventEmittingChunkExecutionListenerBeforeChunk() { final String CHUNK_MESSAGE = "Before Chunk Processing"; this.eventEmittingChunkListener.beforeChunk(new Chunk<>(CHUNK_MESSAGE)); assertThat(getStringFromDestination(this.taskEventProperties.getChunkEventBindingName())) .isEqualTo(CHUNK_MESSAGE); } @Test public void EventEmittingChunkExecutionListenerAfterChunk() { final String CHUNK_MESSAGE = "After Chunk Processing"; this.eventEmittingChunkListener.afterChunk(new Chunk<>()); 
assertThat(getStringFromDestination(this.taskEventProperties.getChunkEventBindingName())) .isEqualTo(CHUNK_MESSAGE); } @Test public void EventEmittingChunkExecutionListenerAfterChunkError() { this.eventEmittingChunkListener.afterChunkError(getChunkContext()); assertNoMessageFromDestination(this.taskEventProperties.getChunkEventBindingName()); } private JobExecution getJobExecution() { final String JOB_NAME = UUID.randomUUID().toString(); JobInstance jobInstance = new JobInstance(1L, JOB_NAME); return new JobExecution(1L, jobInstance, new JobParameters()); } private Chunk getSampleList() { List testList = new ArrayList<>(3); testList.add("Hello"); testList.add("World"); testList.add("foo"); return new Chunk(testList); } private ChunkContext getChunkContext() { JobExecution jobExecution = getJobExecution(); StepExecution stepExecution = new StepExecution("STEP1", jobExecution); StepContext stepContext = new StepContext(stepExecution); ChunkContext chunkContext = new ChunkContext(stepContext); return chunkContext; } private List> testListener(String bindingName, int numberToRead) { List> results = new ArrayList<>(); OutputDestination target = this.applicationContext.getBean(OutputDestination.class); for (int i = 0; i < numberToRead; i++) { results.add(target.receive(10000, bindingName)); } return results; } private String getStringFromDestination(String bindingName) { List> result = testListener(bindingName, 1); assertThat(result.get(0)).isNotNull(); assertThat(new String(result.get(0).getPayload())); return new String(result.get(0).getPayload()); } private void assertNoMessageFromDestination(String bindingName) { List> result = testListener(bindingName, 1); assertThat(result.get(0)).isNull(); } @SpringBootApplication public static class BatchEventsApplication { } } ================================================ FILE: spring-cloud-task-stream/src/test/java/org/springframework/cloud/task/batch/listener/JobExecutionEventTests.java 
================================================ /* * Copyright 2016-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.task.batch.listener; import java.time.LocalDateTime; import java.util.ArrayList; import java.util.Date; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.job.JobExecution; import org.springframework.batch.core.job.JobInstance; import org.springframework.batch.core.job.parameters.JobParameter; import org.springframework.batch.core.job.parameters.JobParameters; import org.springframework.batch.core.step.StepExecution; import org.springframework.batch.infrastructure.item.ExecutionContext; import org.springframework.beans.factory.NoSuchBeanDefinitionException; import org.springframework.boot.autoconfigure.AutoConfigurations; import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.boot.autoconfigure.context.PropertyPlaceholderAutoConfiguration; import org.springframework.boot.test.context.runner.ApplicationContextRunner; import org.springframework.cloud.task.batch.listener.support.JobExecutionEvent; import org.springframework.cloud.task.batch.listener.support.JobInstanceEvent; import 
org.springframework.cloud.task.batch.listener.support.JobParameterEvent; import org.springframework.cloud.task.batch.listener.support.StepExecutionEvent; import org.springframework.cloud.task.configuration.SimpleTaskAutoConfiguration; import org.springframework.cloud.task.configuration.SingleTaskConfiguration; import org.springframework.core.Ordered; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; /** * @author Glenn Renfro. * @author Ali Shahbour */ public class JobExecutionEventTests { private static final String JOB_NAME = "FOODJOB"; private static final Long JOB_INSTANCE_ID = 1L; private static final Long JOB_EXECUTION_ID = 2L; private static final String[] LISTENER_BEAN_NAMES = { BatchEventAutoConfiguration.JOB_EXECUTION_EVENTS_LISTENER, BatchEventAutoConfiguration.STEP_EXECUTION_EVENTS_LISTENER, BatchEventAutoConfiguration.CHUNK_EVENTS_LISTENER, BatchEventAutoConfiguration.ITEM_READ_EVENTS_LISTENER, BatchEventAutoConfiguration.ITEM_WRITE_EVENTS_LISTENER, BatchEventAutoConfiguration.ITEM_PROCESS_EVENTS_LISTENER, BatchEventAutoConfiguration.SKIP_EVENTS_LISTENER }; private JobParameters jobParameters; private JobInstance jobInstance; @BeforeEach public void setup() { this.jobInstance = new JobInstance(JOB_INSTANCE_ID, JOB_NAME); this.jobParameters = new JobParameters(); } @Test public void testBasic() { JobExecution jobExecution; jobExecution = new JobExecution(JOB_EXECUTION_ID, this.jobInstance, this.jobParameters); JobExecutionEvent jobExecutionEvent = new JobExecutionEvent(jobExecution); assertThat(jobExecutionEvent.getJobInstance()).as("jobInstance should not be null").isNotNull(); assertThat(jobExecutionEvent.getJobParameters()).as("jobParameters should not be null").isNotNull(); assertThat(jobExecutionEvent.getJobParameters().getParameters().size()).as("jobParameters size did not match") .isEqualTo(0); assertThat(jobExecutionEvent.getJobInstance().getJobName()).as("jobInstance name 
did not match") .isEqualTo(JOB_NAME); assertThat(jobExecutionEvent.getStepExecutions().size()).as("no step executions were expected").isEqualTo(0); assertThat(jobExecutionEvent.getExitStatus().getExitCode()).as("exitStatus did not match expected") .isEqualTo("UNKNOWN"); } @Test public void testJobParameters() { String[] JOB_PARAM_KEYS = { "A", "B", "C", "D" }; Date testDate = new Date(); JobParameter[] PARAMETERS = { new JobParameter("A", "FOO", String.class), new JobParameter("B", 1L, Long.class), new JobParameter("C", 1D, Double.class), new JobParameter("D", testDate, Date.class) }; Set> jobParamMap = new HashSet<>(); for (int paramCount = 0; paramCount < JOB_PARAM_KEYS.length; paramCount++) { jobParamMap.add(PARAMETERS[paramCount]); } this.jobParameters = new JobParameters(jobParamMap); JobExecution jobExecution; jobExecution = new JobExecution(JOB_EXECUTION_ID, this.jobInstance, this.jobParameters); JobExecutionEvent jobExecutionEvent = new JobExecutionEvent(jobExecution); assertThat(jobExecutionEvent.getJobParameters().getParameters()).contains(new JobParameterEvent(PARAMETERS[0]), new JobParameterEvent(PARAMETERS[1]), new JobParameterEvent(PARAMETERS[2]), new JobParameterEvent(PARAMETERS[3])); } @Test public void testStepExecutions() { JobExecution jobExecution; jobExecution = new JobExecution(JOB_EXECUTION_ID, this.jobInstance, this.jobParameters); List stepsExecutions = new ArrayList<>(); stepsExecutions.add(new StepExecution(0, "foo", jobExecution)); stepsExecutions.add(new StepExecution(1, "bar", jobExecution)); stepsExecutions.add(new StepExecution(2, "baz", jobExecution)); jobExecution.addStepExecutions(stepsExecutions); JobExecutionEvent jobExecutionsEvent = new JobExecutionEvent(jobExecution); assertThat(jobExecutionsEvent.getStepExecutions().size()).as("stepExecutions count is incorrect").isEqualTo(3); Iterator iter = jobExecutionsEvent.getStepExecutions().iterator(); assertThat(iter.next().getStepName()).as("foo stepExecution is not 
present").isEqualTo("foo"); assertThat(iter.next().getStepName()).as("bar stepExecution is not present").isEqualTo("bar"); assertThat(iter.next().getStepName()).as("baz stepExecution is not present").isEqualTo("baz"); } @Test public void testDefaultConfiguration() { testDisabledConfiguration(null, null); } @Test public void testDisabledJobExecutionListener() { testDisabledConfiguration("spring.cloud.task.batch.events.job-execution.enabled", BatchEventAutoConfiguration.JOB_EXECUTION_EVENTS_LISTENER); } @Test public void testDisabledStepExecutionListener() { testDisabledConfiguration("spring.cloud.task.batch.events.step-execution.enabled", BatchEventAutoConfiguration.STEP_EXECUTION_EVENTS_LISTENER); } @Test public void testDisabledChunkListener() { testDisabledConfiguration("spring.cloud.task.batch.events.chunk.enabled", BatchEventAutoConfiguration.CHUNK_EVENTS_LISTENER); } @Test public void testDisabledItemReadListener() { testDisabledConfiguration("spring.cloud.task.batch.events.item-read.enabled", BatchEventAutoConfiguration.ITEM_READ_EVENTS_LISTENER); } @Test public void testDisabledItemWriteListener() { testDisabledConfiguration("spring.cloud.task.batch.events.item-write.enabled", BatchEventAutoConfiguration.ITEM_WRITE_EVENTS_LISTENER); } @Test public void testDisabledItemProcessListener() { testDisabledConfiguration("spring.cloud.task.batch.events.item-process.enabled", BatchEventAutoConfiguration.ITEM_PROCESS_EVENTS_LISTENER); } @Test public void testDisabledSkipEventListener() { testDisabledConfiguration("spring.cloud.task.batch.events.skip.enabled", BatchEventAutoConfiguration.SKIP_EVENTS_LISTENER); } @Test public void testDefaultConstructor() { JobExecutionEvent jobExecutionEvent = new JobExecutionEvent(); assertThat(jobExecutionEvent.getExitStatus().getExitCode()).isEqualTo("UNKNOWN"); } @Test public void testFailureExceptions() { final String EXCEPTION_MESSAGE = "TEST EXCEPTION"; JobExecutionEvent jobExecutionEvent = new JobExecutionEvent(); 
assertThat(jobExecutionEvent.getFailureExceptions().size()).isEqualTo(0); jobExecutionEvent.addFailureException(new IllegalStateException(EXCEPTION_MESSAGE)); assertThat(jobExecutionEvent.getFailureExceptions().size()).isEqualTo(1); assertThat(jobExecutionEvent.getAllFailureExceptions().size()).isEqualTo(1); assertThat(EXCEPTION_MESSAGE).isEqualTo(jobExecutionEvent.getFailureExceptions().get(0).getMessage()); assertThat(EXCEPTION_MESSAGE).isEqualTo(jobExecutionEvent.getAllFailureExceptions().get(0).getMessage()); } @Test public void testToString() { JobExecutionEvent jobExecutionEvent = new JobExecutionEvent(); assertThat(jobExecutionEvent.toString().startsWith("JobExecutionEvent:")).isTrue(); } @Test public void testGetterSetters() { LocalDateTime date = LocalDateTime.now(); JobExecutionEvent jobExecutionEvent = new JobExecutionEvent(); jobExecutionEvent.setLastUpdated(date); assertThat(jobExecutionEvent.getLastUpdated()).isEqualTo(date); jobExecutionEvent.setCreateTime(date); assertThat(jobExecutionEvent.getCreateTime()).isEqualTo(date); jobExecutionEvent.setEndTime(date); assertThat(jobExecutionEvent.getEndTime()).isEqualTo(date); jobExecutionEvent.setStartTime(date); assertThat(jobExecutionEvent.getStartTime()).isEqualTo(date); } @Test public void testExitStatus() { final String EXIT_CODE = "KNOWN"; JobExecutionEvent jobExecutionEvent = new JobExecutionEvent(); assertThat(jobExecutionEvent.getExitStatus().getExitCode()).isEqualTo("UNKNOWN"); org.springframework.cloud.task.batch.listener.support.ExitStatus expectedExitStatus; expectedExitStatus = new org.springframework.cloud.task.batch.listener.support.ExitStatus(); expectedExitStatus.setExitCode(EXIT_CODE); jobExecutionEvent.setExitStatus(expectedExitStatus); assertThat(jobExecutionEvent.getExitStatus().getExitCode()).isEqualTo(EXIT_CODE); } @Test public void testJobInstance() { final String JOB_NAME = "KNOWN"; JobExecutionEvent jobExecutionEvent = new JobExecutionEvent(); 
assertThat(jobExecutionEvent.getJobInstance()).isNull(); assertThat(jobExecutionEvent.getJobId()).isNull(); JobInstanceEvent expectedJobInstanceEvent = new JobInstanceEvent(1L, JOB_NAME); jobExecutionEvent.setJobInstance(expectedJobInstanceEvent); assertThat(jobExecutionEvent.getJobInstance().getJobName()).isEqualTo(expectedJobInstanceEvent.getJobName()); assertThat(jobExecutionEvent.getJobId()).isEqualTo(expectedJobInstanceEvent.getId()); } @Test public void testExecutionContext() { ExecutionContext executionContext = new ExecutionContext(); executionContext.put("hello", "world"); JobExecutionEvent jobExecutionEvent = new JobExecutionEvent(); assertThat(jobExecutionEvent.getExecutionContext()).isNotNull(); jobExecutionEvent.setExecutionContext(executionContext); assertThat(jobExecutionEvent.getExecutionContext().getString("hello")).isEqualTo("world"); } @Test public void testBatchStatus() { JobExecutionEvent jobExecutionEvent = new JobExecutionEvent(); assertThat(jobExecutionEvent.getStatus()).isEqualTo(BatchStatus.STARTING); jobExecutionEvent.setStatus(BatchStatus.ABANDONED); assertThat(jobExecutionEvent.getStatus()).isEqualTo(BatchStatus.ABANDONED); } @Test public void testUpgradeBatchStatus() { JobExecutionEvent jobExecutionEvent = new JobExecutionEvent(); assertThat(jobExecutionEvent.getStatus()).isEqualTo(BatchStatus.STARTING); jobExecutionEvent.upgradeStatus(BatchStatus.FAILED); assertThat(jobExecutionEvent.getStatus()).isEqualTo(BatchStatus.FAILED); jobExecutionEvent.upgradeStatus(BatchStatus.COMPLETED); assertThat(jobExecutionEvent.getStatus()).isEqualTo(BatchStatus.FAILED); } @Test public void testOrderConfiguration() { ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner() .withConfiguration(AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class, SimpleTaskAutoConfiguration.class, SingleTaskConfiguration.class)) .withUserConfiguration(BatchEventAutoConfiguration.JobExecutionListenerConfiguration.class) 
.withBean("org.springframework.cloud.task.batch.listener.JobExecutionEventTests$BatchEventTestApplication",
        BatchEventTestApplication.class)
.withPropertyValues("--spring.cloud.task.closecontext_enabled=false", "--spring.main.web-environment=false",
        "--spring.cloud.task.batch.events.chunk-order=5", "--spring.cloud.task.batch.events.item-process-order=5",
        "--spring.cloud.task.batch.events.item-read-order=5", "--spring.cloud.task.batch.events.item-write-order=5",
        "--spring.cloud.task.batch.events.job-execution-order=5", "--spring.cloud.task.batch.events.skip-order=5",
        "--spring.cloud.task.batch.events.step-execution-order=5");
// Every listener bean must report the overridden order value of 5.
applicationContextRunner.run((context) -> {
    for (String beanName : LISTENER_BEAN_NAMES) {
        Ordered ordered = (Ordered) context.getBean(beanName);
        assertThat(5).as("Expected order value of 5 for " + beanName).isEqualTo(ordered.getOrder());
    }
});
}

// When a job name is configured via spring.batch.job.jobName, the
// jobExecutionEventsListener bean must NOT be registered.
@Test
public void singleStepBatchJobSkip() {
    ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner()
        .withConfiguration(AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class,
                SimpleTaskAutoConfiguration.class, SingleTaskConfiguration.class))
        .withUserConfiguration(BatchEventAutoConfiguration.JobExecutionListenerConfiguration.class)
        .withBean("org.springframework.cloud.task.batch.listener.JobExecutionEventTests$BatchEventTestApplication",
                BatchEventTestApplication.class)
        .withPropertyValues("--spring.cloud.task.closecontext_enabled=false", "--spring.main.web-environment=false",
                "spring.batch.job.jobName=FOO");
    applicationContextRunner.run((context) -> {
        assertThatThrownBy(() -> context.getBean("jobExecutionEventsListener"))
            .isInstanceOf(NoSuchBeanDefinitionException.class)
            .hasMessageContaining("No bean named 'jobExecutionEventsListener' available");
    });
}

// Shared helper: boots the context with the given listener-disabling property
// (null property means nothing is disabled) and asserts only the named
// listener is absent (continues on the next extracted line).
private void testDisabledConfiguration(String property, String disabledListener) {
    String disabledPropertyArg = (property != null) ?
"--" + property + "=false" : "";
ApplicationContextRunner applicationContextRunner = new ApplicationContextRunner()
    .withConfiguration(AutoConfigurations.of(PropertyPlaceholderAutoConfiguration.class,
            SimpleTaskAutoConfiguration.class, SingleTaskConfiguration.class))
    .withUserConfiguration(BatchEventAutoConfiguration.JobExecutionListenerConfiguration.class)
    .withBean("org.springframework.cloud.task.batch.listener.JobExecutionEventTests$BatchEventTestApplication",
            BatchEventTestApplication.class)
    .withPropertyValues("--spring.cloud.task.closecontext_enabled=false", "--spring.main.web-environment=false",
            disabledPropertyArg);
applicationContextRunner.run((context) -> {
    boolean exceptionThrown = false;
    for (String beanName : LISTENER_BEAN_NAMES) {
        if (disabledListener != null && disabledListener.equals(beanName)) {
            // The disabled listener must be missing from the context.
            try {
                context.getBean(disabledListener);
            }
            catch (NoSuchBeanDefinitionException nsbde) {
                exceptionThrown = true;
            }
            assertThat(exceptionThrown).as(String.format("Did not expect %s bean in context", beanName)).isTrue();
        }
        else {
            // All other listeners must still resolve.
            context.getBean(beanName);
        }
    }
});
}

// Minimal boot application used as the context root for the runner tests above.
@SpringBootApplication
public static class BatchEventTestApplication {

}

}
================================================ FILE: spring-cloud-task-stream/src/test/java/org/springframework/cloud/task/batch/listener/JobInstanceEventTests.java ================================================
/*
 * Copyright 2016-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.cloud.task.batch.listener;

import org.junit.jupiter.api.Test;

import org.springframework.cloud.task.batch.listener.support.JobInstanceEvent;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Tests for {@link JobInstanceEvent}.
 *
 * @author Glenn Renfro
 */
public class JobInstanceEventTests {

    private static final long INSTANCE_ID = 1;

    private static final String JOB_NAME = "FOOBAR";

    // Constructor arguments are exposed unchanged via the getters.
    @Test
    public void testConstructor() {
        JobInstanceEvent jobInstanceEvent = new JobInstanceEvent(INSTANCE_ID, JOB_NAME);
        assertThat(jobInstanceEvent.getInstanceId()).isEqualTo(INSTANCE_ID);
        assertThat(jobInstanceEvent.getJobName()).isEqualTo(JOB_NAME);
    }

    // The no-arg constructor leaves the job name unset.
    @Test
    public void testEmptyConstructor() {
        JobInstanceEvent jobInstanceEvent = new JobInstanceEvent();
        assertThat(jobInstanceEvent.getJobName()).isNull();
    }

    // NOTE(review): no assertion here — this only verifies that getInstanceId()
    // does not throw on an event created with the no-arg constructor.
    @Test
    public void testEmptyConstructorEmptyId() {
        JobInstanceEvent jobInstanceEvent = new JobInstanceEvent();
        jobInstanceEvent.getInstanceId();
    }

    // Pins the exact toString() format.
    @Test
    public void testToString() {
        JobInstanceEvent jobInstanceEvent = new JobInstanceEvent(INSTANCE_ID, JOB_NAME);
        assertThat(jobInstanceEvent.toString()).isEqualTo("JobInstanceEvent: id=1, version=null, Job=[FOOBAR]");
    }

}
================================================ FILE: spring-cloud-task-stream/src/test/java/org/springframework/cloud/task/batch/listener/JobParameterEventTests.java ================================================
/*
 * Copyright 2016-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.cloud.task.batch.listener;

import java.time.LocalDateTime;

import org.junit.jupiter.api.Test;

import org.springframework.batch.core.job.parameters.JobParameter;
import org.springframework.cloud.task.batch.listener.support.JobParameterEvent;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Tests for {@link JobParameterEvent}.
 *
 * @author Glenn Renfro
 */
public class JobParameterEventTests {

    // Default instance: null value, not identifying, equal to another default.
    @Test
    public void testDefaultConstructor() {
        JobParameterEvent jobParameterEvent = new JobParameterEvent();
        assertThat(jobParameterEvent.getValue()).isNull();
        assertThat(jobParameterEvent.isIdentifying()).isFalse();
        assertThat(jobParameterEvent).isEqualTo(new JobParameterEvent());
    }

    // Wrapping String- and LocalDateTime-typed JobParameters preserves value,
    // identifying flag, and equality between events built from the same parameter.
    @Test
    public void testConstructor() {
        final String EXPECTED_VALUE = "FOO";
        final LocalDateTime EXPECTED_DATE_VALUE = LocalDateTime.now();
        JobParameter jobParameter = new JobParameter<>(EXPECTED_VALUE + "Key", EXPECTED_VALUE, String.class);
        JobParameterEvent jobParameterEvent = new JobParameterEvent(jobParameter);
        assertThat(jobParameterEvent.getValue()).isEqualTo(EXPECTED_VALUE);
        assertThat(jobParameterEvent.isIdentifying()).isTrue();
        jobParameter = new JobParameter<>("dateKey", EXPECTED_DATE_VALUE, LocalDateTime.class);
        jobParameterEvent = new JobParameterEvent(jobParameter);
        assertThat(jobParameterEvent.getValue()).isEqualTo(EXPECTED_DATE_VALUE);
        assertThat(jobParameterEvent.isIdentifying()).isTrue();
        assertThat(new JobParameterEvent(jobParameter).equals(jobParameterEvent)).isTrue();
    }

    // equals(): reflexive, rejects foreign types, and holds for two events built
    // from the same parameter (continues on the next extracted line).
    @Test
    public void testEquals() {
        final String EXPECTED_VALUE = "FOO";
        JobParameter jobParameter = new JobParameter<>(EXPECTED_VALUE + "Key", EXPECTED_VALUE, String.class);
        JobParameterEvent jobParameterEvent = new JobParameterEvent(jobParameter);
        JobParameterEvent anotherJobParameterEvent = new JobParameterEvent(jobParameter);
        assertThat(jobParameterEvent.equals(jobParameterEvent)).isTrue();
        assertThat(jobParameterEvent.equals("nope")).isFalse();
// (continuation of testEquals) two events from the same parameter are equal.
assertThat(jobParameterEvent.equals(anotherJobParameterEvent)).isTrue();
}

// hashCode() must be callable on a populated event.
// NOTE(review): isNotNull() on a boxed primitive hash is always true; an
// equal-objects-have-equal-hash assertion would be stronger.
@Test
public void testValidHashCode() {
    final String EXPECTED_VALUE = "FOO";
    JobParameter jobParameter = new JobParameter<>(EXPECTED_VALUE + "Key", EXPECTED_VALUE, String.class);
    JobParameterEvent jobParameterEvent = new JobParameterEvent(jobParameter);
    assertThat(jobParameterEvent.hashCode()).isNotNull();
}

}
================================================ FILE: spring-cloud-task-stream/src/test/java/org/springframework/cloud/task/batch/listener/JobParametersEventTests.java ================================================
/*
 * Copyright 2016-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.cloud.task.batch.listener;

import java.util.Date;
import java.util.HashSet;
import java.util.Set;

import org.junit.jupiter.api.Test;

import org.springframework.batch.core.job.parameters.JobParameter;
import org.springframework.cloud.task.batch.listener.support.JobParameterEvent;
import org.springframework.cloud.task.batch.listener.support.JobParametersEvent;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Tests for {@link JobParametersEvent}.
 *
 * @author Glenn Renfro
 */
public class JobParametersEventTests {

    private final static String DATE_KEY = "DATE_KEY";

    private final static String STRING_KEY = "STRING_KEY";

    private final static String LONG_KEY = "LONG_KEY";

    private final static String DOUBLE_KEY = "DOUBLE_KEY";

    // One fixture parameter per supported value type.
    private final static JobParameter STRING_PARAM = new JobParameter<>(STRING_KEY, "FOO", String.class);

    private final static JobParameter DATE_PARAM = new JobParameter<>(DATE_KEY, new Date(), Date.class);

    private final static JobParameter LONG_PARAM = new JobParameter<>(LONG_KEY, 1L, Long.class);

    private final static JobParameter DOUBLE_PARAM = new JobParameter<>(DOUBLE_KEY, 2D, Double.class);

    // A default event carries no parameters.
    @Test
    public void testDefaultConstructor() {
        JobParametersEvent jobParametersEvent = new JobParametersEvent();
        assertThat(jobParametersEvent.getParameters().size()).isEqualTo(0);
        assertThat(jobParametersEvent.isEmpty()).isTrue();
    }

    // All four fixture parameters survive the conversion, and two events built
    // from the same input are equal.
    @Test
    public void testConstructor() {
        JobParametersEvent jobParametersEvent = getPopulatedParametersEvent();
        // NOTE(review): unused local; the generic type here was mangled by extraction.
        Set jobParameters = jobParametersEvent.getParameters();
        assertThat(jobParametersEvent.getParameters()).contains(new JobParameterEvent(STRING_PARAM),
                new JobParameterEvent(DATE_PARAM), new JobParameterEvent(LONG_PARAM),
                new JobParameterEvent(DOUBLE_PARAM));
        JobParametersEvent jobParametersEventNew = getPopulatedParametersEvent();
        assertThat(jobParametersEvent).isEqualTo(jobParametersEventNew);
    }

    // equals(): equal for same content, reflexive, rejects foreign types
    // (continues on the next extracted line).
    @Test
    public void testEquals() {
        assertThat(getPopulatedParametersEvent().equals(getPopulatedParametersEvent())).isTrue();
JobParametersEvent jobParametersEvent = getPopulatedParametersEvent(); assertThat(jobParametersEvent.equals("FOO")).isFalse(); assertThat(jobParametersEvent.equals(jobParametersEvent)).isTrue(); } @Test public void testHashCode() { JobParametersEvent jobParametersEvent = new JobParametersEvent(); assertThat(jobParametersEvent.hashCode()).isNotNull(); JobParametersEvent jobParametersEventPopulated = getPopulatedParametersEvent(); assertThat(jobParametersEvent).isNotNull(); assertThat(jobParametersEventPopulated.hashCode()).isNotEqualTo(jobParametersEvent.hashCode()); } @Test public void testToString() { JobParametersEvent jobParametersEvent = getPopulatedParametersEvent(); assertThat(toString()).isNotNull(); } public JobParametersEvent getPopulatedParametersEvent() { Set> jobParameters = new HashSet<>(); jobParameters.add(DATE_PARAM); jobParameters.add(STRING_PARAM); jobParameters.add(LONG_PARAM); jobParameters.add(DOUBLE_PARAM); return new JobParametersEvent(jobParameters); } } ================================================ FILE: spring-cloud-task-stream/src/test/java/org/springframework/cloud/task/batch/listener/StepExecutionEventTests.java ================================================ /* * Copyright 2016-present the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
 */

package org.springframework.cloud.task.batch.listener;

import java.time.LocalDateTime;

import org.junit.jupiter.api.Test;

import org.springframework.batch.core.BatchStatus;
import org.springframework.batch.core.job.JobExecution;
import org.springframework.batch.core.job.JobInstance;
import org.springframework.batch.core.job.parameters.JobParameters;
import org.springframework.batch.core.step.StepExecution;
import org.springframework.batch.infrastructure.item.ExecutionContext;
import org.springframework.cloud.task.batch.listener.support.ExitStatus;
import org.springframework.cloud.task.batch.listener.support.StepExecutionEvent;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Tests for {@link StepExecutionEvent}.
 *
 * @author Glenn Renfro
 */
public class StepExecutionEventTests {

    private static final String JOB_NAME = "FOO_JOB";

    private static final String STEP_NAME = "STEP_NAME";

    private static final Long JOB_INSTANCE_ID = 1L;

    private static final Long JOB_EXECUTION_ID = 2L;

    // Every counter and timestamp of the source StepExecution must be copied
    // onto the event (assertions continue on the next extracted line).
    @Test
    public void testBasic() {
        StepExecution stepExecution = getBasicStepExecution();
        stepExecution.setCommitCount(1);
        stepExecution.setReadCount(2);
        stepExecution.setWriteCount(3);
        stepExecution.setReadSkipCount(4);
        stepExecution.setWriteSkipCount(5);
        StepExecutionEvent stepExecutionEvent = new StepExecutionEvent(stepExecution);
        assertThat(stepExecutionEvent.getStepName()).as("stepName result was not as expected").isEqualTo(STEP_NAME);
        assertThat(stepExecutionEvent.getStartTime()).as("startTime result was not as expected")
            .isEqualTo(stepExecution.getStartTime());
        assertThat(stepExecutionEvent.getEndTime()).as("endTime result was not as expected")
            .isEqualTo(stepExecution.getEndTime());
        assertThat(stepExecutionEvent.getLastUpdated()).as("lastUpdated result was not as expected")
            .isEqualTo(stepExecution.getLastUpdated());
        assertThat(stepExecutionEvent.getCommitCount()).as("commitCount result was not as expected")
            .isEqualTo(stepExecution.getCommitCount());
        assertThat(stepExecutionEvent.getReadCount()).as("readCount
result was not as expected") .isEqualTo(stepExecution.getReadCount()); assertThat(stepExecutionEvent.getReadSkipCount()).as("readSkipCount result was not as expected") .isEqualTo(stepExecution.getReadSkipCount()); assertThat(stepExecutionEvent.getWriteCount()).as("writeCount result was not as expected") .isEqualTo(stepExecution.getWriteCount()); assertThat(stepExecutionEvent.getWriteSkipCount()).as("writeSkipCount result was not as expected") .isEqualTo(stepExecution.getWriteSkipCount()); assertThat(stepExecutionEvent.getSkipCount()).as("skipCount result was not as expected") .isEqualTo(stepExecution.getSkipCount()); } @Test public void testException() { RuntimeException exception = new RuntimeException("EXPECTED EXCEPTION"); StepExecution stepExecution = getBasicStepExecution(); stepExecution.addFailureException(exception); StepExecutionEvent stepExecutionEvent = new StepExecutionEvent(stepExecution); assertThat(stepExecutionEvent.getFailureExceptions().size()).isEqualTo(1); assertThat(stepExecution.getFailureExceptions().get(0)).isEqualTo(exception); } @Test public void testGetSummary() { StepExecution stepExecution = getBasicStepExecution(); StepExecutionEvent stepExecutionEvent = new StepExecutionEvent(stepExecution); assertThat(stepExecutionEvent.getSummary()) .isEqualTo("StepExecutionEvent: id=2, version=null, name=STEP_NAME, status=STARTING," + " exitStatus=EXECUTING, readCount=0, filterCount=0, writeCount=0 readSkipCount=0," + " writeSkipCount=0, processSkipCount=0, commitCount=0, rollbackCount=0"); } @Test public void testHashCode() { StepExecution stepExecution = getBasicStepExecution(); StepExecutionEvent stepExecutionEvent = new StepExecutionEvent(stepExecution); assertThat(stepExecutionEvent.toString()).isEqualTo("StepExecutionEvent: id=2, version=null, " + "name=STEP_NAME, status=STARTING, exitStatus=EXECUTING, " + "readCount=0, filterCount=0, writeCount=0 readSkipCount=0, " + "writeSkipCount=0, processSkipCount=0, commitCount=0, " + "rollbackCount=0, 
exitDescription="); } @Test public void testToString() { StepExecution stepExecution = getBasicStepExecution(); StepExecutionEvent stepExecutionEvent = new StepExecutionEvent(stepExecution); assertThat(stepExecutionEvent.hashCode()).isNotNull(); } @Test public void testEquals() { StepExecution stepExecution = getBasicStepExecution(); StepExecutionEvent stepExecutionEvent = new StepExecutionEvent(stepExecution); assertThat(stepExecutionEvent.equals(getBasicStepExecution())).isFalse(); } @Test public void testSettersGetters() { StepExecutionEvent stepExecutionEvent = new StepExecutionEvent(getBasicStepExecution()); LocalDateTime date = LocalDateTime.now(); stepExecutionEvent.setLastUpdated(date); assertThat(stepExecutionEvent.getLastUpdated()).isEqualTo(date); stepExecutionEvent.setProcessSkipCount(55); assertThat(stepExecutionEvent.getProcessSkipCount()).isEqualTo(55); stepExecutionEvent.setWriteSkipCount(47); assertThat(stepExecutionEvent.getWriteSkipCount()).isEqualTo(47); stepExecutionEvent.setReadSkipCount(49); assertThat(stepExecutionEvent.getReadSkipCount()).isEqualTo(49); assertThat(stepExecutionEvent.getCommitCount()).isEqualTo(0); stepExecutionEvent.incrementCommitCount(); assertThat(stepExecutionEvent.getCommitCount()).isEqualTo(1); assertThat(stepExecutionEvent.isTerminateOnly()).isFalse(); stepExecutionEvent.setTerminateOnly(); assertThat(stepExecutionEvent.isTerminateOnly()).isTrue(); stepExecutionEvent.setStepName("FOOBAR"); assertThat(stepExecutionEvent.getStepName()).isEqualTo("FOOBAR"); stepExecutionEvent.setStartTime(date); assertThat(stepExecutionEvent.getStartTime()).isEqualTo(date); assertThat(stepExecutionEvent.getRollbackCount()).isEqualTo(0); stepExecutionEvent.setRollbackCount(33); assertThat(stepExecutionEvent.getRollbackCount()).isEqualTo(33); stepExecutionEvent.setFilterCount(23); assertThat(stepExecutionEvent.getFilterCount()).isEqualTo(23); stepExecutionEvent.setWriteCount(11); 
// (continuation of testSettersGetters)
assertThat(stepExecutionEvent.getWriteCount()).isEqualTo(11);
stepExecutionEvent.setReadCount(12);
assertThat(stepExecutionEvent.getReadCount()).isEqualTo(12);
stepExecutionEvent.setEndTime(date);
assertThat(stepExecutionEvent.getEndTime()).isEqualTo(date);
stepExecutionEvent.setCommitCount(29);
assertThat(stepExecutionEvent.getCommitCount()).isEqualTo(29);
}

// A custom ExitStatus set on the event is returned with its code and
// description intact.
@Test
public void testExitStatus() {
    StepExecutionEvent stepExecutionEvent = new StepExecutionEvent(getBasicStepExecution());
    final String EXIT_CODE = "1";
    final String EXIT_DESCRIPTION = "EXPECTED FAILURE";
    ExitStatus exitStatus = new ExitStatus();
    exitStatus.setExitCode(EXIT_CODE);
    exitStatus.setExitDescription(EXIT_DESCRIPTION);
    stepExecutionEvent.setExitStatus(exitStatus);
    ExitStatus actualExitStatus = stepExecutionEvent.getExitStatus();
    assertThat(actualExitStatus).isNotNull();
    assertThat(actualExitStatus.getExitCode()).isEqualTo(exitStatus.getExitCode());
    assertThat(actualExitStatus.getExitDescription()).isEqualTo(exitStatus.getExitDescription());
}

// Status defaults to STARTING and is mutable via setStatus.
@Test
public void testBatchStatus() {
    StepExecutionEvent stepExecutionEvent = new StepExecutionEvent(getBasicStepExecution());
    assertThat(stepExecutionEvent.getStatus()).isEqualTo(BatchStatus.STARTING);
    stepExecutionEvent.setStatus(BatchStatus.ABANDONED);
    assertThat(stepExecutionEvent.getStatus()).isEqualTo(BatchStatus.ABANDONED);
}

// A no-arg event starts as STARTING with exit code "EXECUTING".
@Test
public void testDefaultConstructor() {
    StepExecutionEvent stepExecutionEvent = new StepExecutionEvent();
    assertThat(stepExecutionEvent.getStatus()).isEqualTo(BatchStatus.STARTING);
    assertThat(stepExecutionEvent.getExitStatus()).isNotNull();
    assertThat(stepExecutionEvent.getExitStatus().getExitCode()).isEqualTo("EXECUTING");
}

// A value stored in an ExecutionContext set on the event is readable back
// (continues on the next extracted line).
@Test
public void testExecutionContext() {
    ExecutionContext executionContext = new ExecutionContext();
    executionContext.put("hello", "world");
    StepExecutionEvent stepExecutionEvent = new StepExecutionEvent(getBasicStepExecution());
    assertThat(stepExecutionEvent.getExecutionContext()).isNotNull();
// (continuation of testExecutionContext)
stepExecutionEvent.setExecutionContext(executionContext);
assertThat(stepExecutionEvent.getExecutionContext().getString("hello")).isEqualTo("world");
}

// Builds a minimal StepExecution attached to a fixed job instance/execution.
private StepExecution getBasicStepExecution() {
    JobInstance jobInstance = new JobInstance(JOB_INSTANCE_ID, JOB_NAME);
    JobParameters jobParameters = new JobParameters();
    JobExecution jobExecution = new JobExecution(JOB_EXECUTION_ID, jobInstance, jobParameters);
    return new StepExecution(1, STEP_NAME, jobExecution);
}

}
================================================ FILE: spring-cloud-task-stream/src/test/java/org/springframework/cloud/task/batch/listener/TaskBatchEventListenerBeanPostProcessorTests.java ================================================
/*
 * Copyright 2016-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.cloud.task.batch.listener;

import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;

import org.springframework.batch.core.listener.ChunkListener;
import org.springframework.batch.core.listener.ItemProcessListener;
import org.springframework.batch.core.listener.ItemReadListener;
import org.springframework.batch.core.listener.ItemWriteListener;
import org.springframework.batch.core.listener.SkipListener;
import org.springframework.batch.core.listener.StepExecutionListener;
import org.springframework.batch.core.step.item.ChunkOrientedTasklet;
import org.springframework.batch.core.step.item.SimpleChunkProcessor;
import org.springframework.batch.core.step.item.SimpleChunkProvider;
import org.springframework.batch.core.step.tasklet.TaskletStep;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.cloud.task.batch.listener.support.TaskBatchEventListenerBeanPostProcessor;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.support.GenericApplicationContext;
import org.springframework.test.context.bean.override.mockito.MockitoBean;
import org.springframework.test.context.junit.jupiter.SpringExtension;

import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.when;

/**
 * Tests for {@link TaskBatchEventListenerBeanPostProcessor}.
 *
 * @author Glenn Renfro
 */
@ExtendWith(SpringExtension.class)
@SpringBootTest
public class TaskBatchEventListenerBeanPostProcessorTests {

    // One mocked batch listener per supported listener type; setupMock() aliases
    // each mock to the well-known bean name used by the event auto-configuration.
    @MockitoBean
    ItemProcessListener itemProcessListener;

    @MockitoBean
    StepExecutionListener stepExecutionListener;

    @MockitoBean
    ChunkListener chunkListener;

    @MockitoBean
    ItemReadListener itemReadListener;

    @MockitoBean
    ItemWriteListener itemWriteListener;

    @MockitoBean
    SkipListener
skipListener;

@MockitoBean
private TaskletStep taskletStep;

@MockitoBean
private SimpleChunkProvider chunkProvider;

@MockitoBean
private SimpleChunkProcessor chunkProcessor;

@Autowired
private GenericApplicationContext context;

// Stubs the tasklet step and registers the listener-bean aliases the
// post-processor resolves from the context.
@BeforeEach
public void setupMock() {
    when(this.taskletStep.getTasklet())
        .thenReturn(new ChunkOrientedTasklet(this.chunkProvider, this.chunkProcessor));
    when(this.taskletStep.getName()).thenReturn("FOOOBAR");
    registerAlias(ItemProcessListener.class, BatchEventAutoConfiguration.ITEM_PROCESS_EVENTS_LISTENER);
    registerAlias(StepExecutionListener.class, BatchEventAutoConfiguration.STEP_EXECUTION_EVENTS_LISTENER);
    registerAlias(ChunkListener.class, BatchEventAutoConfiguration.CHUNK_EVENTS_LISTENER);
    registerAlias(ItemReadListener.class, BatchEventAutoConfiguration.ITEM_READ_EVENTS_LISTENER);
    registerAlias(ItemWriteListener.class, BatchEventAutoConfiguration.ITEM_WRITE_EVENTS_LISTENER);
    registerAlias(SkipListener.class, BatchEventAutoConfiguration.SKIP_EVENTS_LISTENER);
}

// Post-processing a TaskletStep must return the same step instance.
@Test
public void testPostProcessor() {
    TaskBatchEventListenerBeanPostProcessor postProcessor = this.context
        .getBean(TaskBatchEventListenerBeanPostProcessor.class);
    assertThat(postProcessor).isNotNull();
    TaskletStep updatedTaskletStep = (TaskletStep) postProcessor.postProcessBeforeInitialization(this.taskletStep,
            "FOO");
    assertThat(updatedTaskletStep).isEqualTo(this.taskletStep);
}

// Aliases the single bean of the given type to the well-known listener name.
private void registerAlias(Class clazz, String name) {
    assertThat(this.context.getBeanNamesForType(clazz).length).isEqualTo(1);
    this.context.registerAlias(this.context.getBeanNamesForType(clazz)[0], name);
}

@Configuration(proxyBeanMethods = false)
@EnableAutoConfiguration
public static class TestConfiguration {

    @Bean
    public static TaskBatchEventListenerBeanPostProcessor taskBatchEventListenerBeanPostProcessor() {
        return new TaskBatchEventListenerBeanPostProcessor();
    }

}

}
================================================ FILE:
spring-cloud-task-stream/src/test/java/org/springframework/cloud/task/batch/listener/support/TaskBatchEventListenerBeanPostProcessorRuntimeHintTests.java ================================================
/*
 * Copyright 2022-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.cloud.task.batch.listener.support;

import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import org.springframework.aot.hint.RuntimeHints;
import org.springframework.batch.core.step.item.ChunkOrientedTasklet;
import org.springframework.cloud.task.batch.listener.support.TaskBatchEventListenerBeanPostProcessor.RuntimeHint;
import org.springframework.util.ReflectionUtils;

import static org.assertj.core.api.Assertions.assertThat;
import static org.springframework.aot.hint.predicate.RuntimeHintsPredicates.reflection;

/**
 * Tests for the AOT {@link RuntimeHint} registered by
 * {@link TaskBatchEventListenerBeanPostProcessor}.
 *
 * @author Henning Pöttker
 */
class TaskBatchEventListenerBeanPostProcessorRuntimeHintTests {

    private RuntimeHints hints;

    // Registers the production hints into a fresh RuntimeHints instance.
    @BeforeEach
    void setUp() {
        this.hints = new RuntimeHints();
        new RuntimeHint().registerHints(this.hints, getClass().getClassLoader());
    }

    // The hints must allow reflective access to ChunkOrientedTasklet's
    // "chunkProvider" field.
    @Test
    void reflectionOnChunkProviderFieldIsAllowed() {
        var field = ReflectionUtils.findField(ChunkOrientedTasklet.class, "chunkProvider");
        assertThat(field).isNotNull();
        assertThat(reflection().onField(field)).accepts(this.hints);
    }

    // Same for the "chunkProcessor" field (continues on the next extracted line).
    @Test
    void reflectionOnChunkProcessorFieldIsAllowed() {
        var field = ReflectionUtils.findField(ChunkOrientedTasklet.class,
"chunkProcessor");
assertThat(field).isNotNull();
assertThat(reflection().onField(field)).accepts(this.hints);
}

}
================================================ FILE: spring-cloud-task-stream/src/test/java/org/springframework/cloud/task/listener/TaskEventTests.java ================================================
/*
 * Copyright 2016-present the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.cloud.task.listener;

import org.junit.jupiter.api.Test;

import org.springframework.boot.WebApplicationType;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.builder.SpringApplicationBuilder;
import org.springframework.cloud.stream.binder.test.TestChannelBinderConfiguration;
import org.springframework.cloud.task.configuration.EnableTask;
import org.springframework.context.ConfigurableApplicationContext;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Verifies that a task-enabled application booted against the test channel
 * binder exposes the taskEventEmitter bean.
 *
 * @author Michael Minella
 * @author Ilayaperumal Gopinathan
 * @author Glenn Renfro
 */
public class TaskEventTests {

    // Boots the app (no web environment) and expects the taskEventEmitter bean.
    @Test
    public void testDefaultConfiguration() {
        ConfigurableApplicationContext applicationContext = new SpringApplicationBuilder()
            .sources(TestChannelBinderConfiguration.getCompleteConfiguration(TaskEventsApplication.class))
            .web(WebApplicationType.NONE)
            .build()
            .run();
        assertThat(applicationContext.getBean("taskEventEmitter")).isNotNull();
    }

    // Minimal task-enabled boot application used as the context source above.
    @EnableTask
    @SpringBootApplication
    public static class TaskEventsApplication {

    }

}
================================================ FILE: src/checkstyle/checkstyle-suppressions.xml ================================================